{ "best_metric": 0.8912756443023682, "best_model_checkpoint": "autotrain-obj-det-cppe5-1/checkpoint-12125", "epoch": 97.0, "eval_steps": 500, "global_step": 12125, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.008, "grad_norm": null, "learning_rate": 0.0, "loss": 3.7018, "step": 1 }, { "epoch": 0.016, "grad_norm": null, "learning_rate": 0.0, "loss": 4.0323, "step": 2 }, { "epoch": 0.024, "grad_norm": null, "learning_rate": 0.0, "loss": 4.0947, "step": 3 }, { "epoch": 0.032, "grad_norm": 72.00308227539062, "learning_rate": 4e-08, "loss": 3.604, "step": 4 }, { "epoch": 0.04, "grad_norm": 86.05225372314453, "learning_rate": 8e-08, "loss": 4.2135, "step": 5 }, { "epoch": 0.048, "grad_norm": 314.2935791015625, "learning_rate": 1.2e-07, "loss": 3.5784, "step": 6 }, { "epoch": 0.056, "grad_norm": 53.59658432006836, "learning_rate": 1.6e-07, "loss": 3.3972, "step": 7 }, { "epoch": 0.064, "grad_norm": 104.16551208496094, "learning_rate": 2.0000000000000002e-07, "loss": 3.5234, "step": 8 }, { "epoch": 0.072, "grad_norm": null, "learning_rate": 2.0000000000000002e-07, "loss": 3.9419, "step": 9 }, { "epoch": 0.08, "grad_norm": 117.49606323242188, "learning_rate": 2.4e-07, "loss": 3.8196, "step": 10 }, { "epoch": 0.088, "grad_norm": 93.0835189819336, "learning_rate": 2.8e-07, "loss": 4.7333, "step": 11 }, { "epoch": 0.096, "grad_norm": 112.12532043457031, "learning_rate": 3.2e-07, "loss": 3.8864, "step": 12 }, { "epoch": 0.104, "grad_norm": 150.11375427246094, "learning_rate": 3.6e-07, "loss": 3.9885, "step": 13 }, { "epoch": 0.112, "grad_norm": 227.05191040039062, "learning_rate": 4.0000000000000003e-07, "loss": 4.1237, "step": 14 }, { "epoch": 0.12, "grad_norm": 74.49281311035156, "learning_rate": 4.4e-07, "loss": 4.5055, "step": 15 }, { "epoch": 0.128, "grad_norm": 70.38601684570312, "learning_rate": 4.8e-07, "loss": 4.1448, "step": 16 }, { "epoch": 0.136, "grad_norm": 92.2749252319336, 
"learning_rate": 5.2e-07, "loss": 3.9241, "step": 17 }, { "epoch": 0.144, "grad_norm": null, "learning_rate": 5.2e-07, "loss": 4.1381, "step": 18 }, { "epoch": 0.152, "grad_norm": 88.27749633789062, "learning_rate": 5.6e-07, "loss": 4.3647, "step": 19 }, { "epoch": 0.16, "grad_norm": 237.92710876464844, "learning_rate": 6.000000000000001e-07, "loss": 4.2348, "step": 20 }, { "epoch": 0.168, "grad_norm": 89.40242004394531, "learning_rate": 6.4e-07, "loss": 3.9574, "step": 21 }, { "epoch": 0.176, "grad_norm": 141.68023681640625, "learning_rate": 6.8e-07, "loss": 4.0413, "step": 22 }, { "epoch": 0.184, "grad_norm": 200.5878448486328, "learning_rate": 7.2e-07, "loss": 4.0929, "step": 23 }, { "epoch": 0.192, "grad_norm": 424.9366760253906, "learning_rate": 7.6e-07, "loss": 3.9013, "step": 24 }, { "epoch": 0.2, "grad_norm": 57.161739349365234, "learning_rate": 8.000000000000001e-07, "loss": 4.0213, "step": 25 }, { "epoch": 0.208, "grad_norm": 174.53646850585938, "learning_rate": 8.4e-07, "loss": 3.7886, "step": 26 }, { "epoch": 0.216, "grad_norm": 69.44010162353516, "learning_rate": 8.8e-07, "loss": 3.8993, "step": 27 }, { "epoch": 0.224, "grad_norm": 245.93280029296875, "learning_rate": 9.2e-07, "loss": 3.452, "step": 28 }, { "epoch": 0.232, "grad_norm": 120.91592407226562, "learning_rate": 9.6e-07, "loss": 4.3956, "step": 29 }, { "epoch": 0.24, "grad_norm": 180.25108337402344, "learning_rate": 1.0000000000000002e-06, "loss": 3.8303, "step": 30 }, { "epoch": 0.248, "grad_norm": null, "learning_rate": 1.0000000000000002e-06, "loss": 4.367, "step": 31 }, { "epoch": 0.256, "grad_norm": 172.37583923339844, "learning_rate": 1.04e-06, "loss": 3.4272, "step": 32 }, { "epoch": 0.264, "grad_norm": 270.00872802734375, "learning_rate": 1.08e-06, "loss": 3.6336, "step": 33 }, { "epoch": 0.272, "grad_norm": 444.2965393066406, "learning_rate": 1.12e-06, "loss": 3.3881, "step": 34 }, { "epoch": 0.28, "grad_norm": 88.26665496826172, "learning_rate": 1.16e-06, "loss": 3.6739, "step": 
35 }, { "epoch": 0.288, "grad_norm": 834.8115234375, "learning_rate": 1.2000000000000002e-06, "loss": 4.7353, "step": 36 }, { "epoch": 0.296, "grad_norm": 533.33544921875, "learning_rate": 1.24e-06, "loss": 4.5055, "step": 37 }, { "epoch": 0.304, "grad_norm": 86.67887115478516, "learning_rate": 1.28e-06, "loss": 3.3824, "step": 38 }, { "epoch": 0.312, "grad_norm": null, "learning_rate": 1.28e-06, "loss": 3.9694, "step": 39 }, { "epoch": 0.32, "grad_norm": 165.56396484375, "learning_rate": 1.32e-06, "loss": 3.7988, "step": 40 }, { "epoch": 0.328, "grad_norm": 584.3043212890625, "learning_rate": 1.36e-06, "loss": 3.9675, "step": 41 }, { "epoch": 0.336, "grad_norm": 109.798095703125, "learning_rate": 1.4000000000000001e-06, "loss": 3.5193, "step": 42 }, { "epoch": 0.344, "grad_norm": 209.11526489257812, "learning_rate": 1.44e-06, "loss": 3.5429, "step": 43 }, { "epoch": 0.352, "grad_norm": 376.80413818359375, "learning_rate": 1.4800000000000002e-06, "loss": 3.7386, "step": 44 }, { "epoch": 0.36, "grad_norm": 217.63381958007812, "learning_rate": 1.52e-06, "loss": 4.3986, "step": 45 }, { "epoch": 0.368, "grad_norm": 180.84515380859375, "learning_rate": 1.56e-06, "loss": 3.6203, "step": 46 }, { "epoch": 0.376, "grad_norm": 124.13750457763672, "learning_rate": 1.6000000000000001e-06, "loss": 3.9809, "step": 47 }, { "epoch": 0.384, "grad_norm": 119.41777801513672, "learning_rate": 1.6400000000000002e-06, "loss": 3.5271, "step": 48 }, { "epoch": 0.392, "grad_norm": 148.10426330566406, "learning_rate": 1.68e-06, "loss": 3.6789, "step": 49 }, { "epoch": 0.4, "grad_norm": 139.5306396484375, "learning_rate": 1.72e-06, "loss": 4.3609, "step": 50 }, { "epoch": 0.408, "grad_norm": 130.8395538330078, "learning_rate": 1.76e-06, "loss": 3.6528, "step": 51 }, { "epoch": 0.416, "grad_norm": 249.2919158935547, "learning_rate": 1.8e-06, "loss": 3.8334, "step": 52 }, { "epoch": 0.424, "grad_norm": 296.0094909667969, "learning_rate": 1.84e-06, "loss": 3.9528, "step": 53 }, { "epoch": 
0.432, "grad_norm": 114.58678436279297, "learning_rate": 1.8800000000000002e-06, "loss": 3.9503, "step": 54 }, { "epoch": 0.44, "grad_norm": 120.48788452148438, "learning_rate": 1.92e-06, "loss": 3.4655, "step": 55 }, { "epoch": 0.448, "grad_norm": 168.49826049804688, "learning_rate": 1.96e-06, "loss": 3.6357, "step": 56 }, { "epoch": 0.456, "grad_norm": 177.34811401367188, "learning_rate": 2.0000000000000003e-06, "loss": 4.7166, "step": 57 }, { "epoch": 0.464, "grad_norm": 320.8636474609375, "learning_rate": 2.0400000000000004e-06, "loss": 3.9693, "step": 58 }, { "epoch": 0.472, "grad_norm": 427.2450866699219, "learning_rate": 2.08e-06, "loss": 4.1987, "step": 59 }, { "epoch": 0.48, "grad_norm": 702.7684326171875, "learning_rate": 2.12e-06, "loss": 4.1251, "step": 60 }, { "epoch": 0.488, "grad_norm": 496.4269714355469, "learning_rate": 2.16e-06, "loss": 3.5033, "step": 61 }, { "epoch": 0.496, "grad_norm": 241.85308837890625, "learning_rate": 2.2e-06, "loss": 4.3612, "step": 62 }, { "epoch": 0.504, "grad_norm": 164.95864868164062, "learning_rate": 2.24e-06, "loss": 3.3108, "step": 63 }, { "epoch": 0.512, "grad_norm": 304.5556335449219, "learning_rate": 2.28e-06, "loss": 3.8685, "step": 64 }, { "epoch": 0.52, "grad_norm": 73.93318939208984, "learning_rate": 2.32e-06, "loss": 3.7668, "step": 65 }, { "epoch": 0.528, "grad_norm": 177.67469787597656, "learning_rate": 2.36e-06, "loss": 3.592, "step": 66 }, { "epoch": 0.536, "grad_norm": 444.97894287109375, "learning_rate": 2.4000000000000003e-06, "loss": 3.9706, "step": 67 }, { "epoch": 0.544, "grad_norm": 372.03668212890625, "learning_rate": 2.4400000000000004e-06, "loss": 3.5904, "step": 68 }, { "epoch": 0.552, "grad_norm": 44.786041259765625, "learning_rate": 2.48e-06, "loss": 4.0424, "step": 69 }, { "epoch": 0.56, "grad_norm": 139.39344787597656, "learning_rate": 2.52e-06, "loss": 4.0023, "step": 70 }, { "epoch": 0.568, "grad_norm": 735.4076538085938, "learning_rate": 2.56e-06, "loss": 3.4507, "step": 71 }, { 
"epoch": 0.576, "grad_norm": 116.331787109375, "learning_rate": 2.6e-06, "loss": 3.7258, "step": 72 }, { "epoch": 0.584, "grad_norm": 324.2991943359375, "learning_rate": 2.64e-06, "loss": 3.7667, "step": 73 }, { "epoch": 0.592, "grad_norm": 116.23673248291016, "learning_rate": 2.68e-06, "loss": 4.0329, "step": 74 }, { "epoch": 0.6, "grad_norm": 92.85771942138672, "learning_rate": 2.72e-06, "loss": 3.4763, "step": 75 }, { "epoch": 0.608, "grad_norm": 285.7426452636719, "learning_rate": 2.7600000000000003e-06, "loss": 3.4617, "step": 76 }, { "epoch": 0.616, "grad_norm": 329.1176452636719, "learning_rate": 2.8000000000000003e-06, "loss": 4.201, "step": 77 }, { "epoch": 0.624, "grad_norm": 436.0442199707031, "learning_rate": 2.8400000000000003e-06, "loss": 3.9963, "step": 78 }, { "epoch": 0.632, "grad_norm": 61.588340759277344, "learning_rate": 2.88e-06, "loss": 3.2176, "step": 79 }, { "epoch": 0.64, "grad_norm": 130.7513885498047, "learning_rate": 2.92e-06, "loss": 3.3593, "step": 80 }, { "epoch": 0.648, "grad_norm": 3263.361083984375, "learning_rate": 2.9600000000000005e-06, "loss": 3.7072, "step": 81 }, { "epoch": 0.656, "grad_norm": 80.83671569824219, "learning_rate": 3e-06, "loss": 3.4564, "step": 82 }, { "epoch": 0.664, "grad_norm": 895.2838134765625, "learning_rate": 3.04e-06, "loss": 3.7303, "step": 83 }, { "epoch": 0.672, "grad_norm": 152.3745880126953, "learning_rate": 3.08e-06, "loss": 3.7799, "step": 84 }, { "epoch": 0.68, "grad_norm": 435.84326171875, "learning_rate": 3.12e-06, "loss": 4.0362, "step": 85 }, { "epoch": 0.688, "grad_norm": 69.59569549560547, "learning_rate": 3.1600000000000007e-06, "loss": 4.6498, "step": 86 }, { "epoch": 0.696, "grad_norm": 102.4559097290039, "learning_rate": 3.2000000000000003e-06, "loss": 3.3812, "step": 87 }, { "epoch": 0.704, "grad_norm": 119.61168670654297, "learning_rate": 3.24e-06, "loss": 3.9542, "step": 88 }, { "epoch": 0.712, "grad_norm": 202.1793212890625, "learning_rate": 3.2800000000000004e-06, "loss": 3.9018, 
"step": 89 }, { "epoch": 0.72, "grad_norm": 77.74322509765625, "learning_rate": 3.3200000000000004e-06, "loss": 4.0028, "step": 90 }, { "epoch": 0.728, "grad_norm": 56.2535400390625, "learning_rate": 3.36e-06, "loss": 3.522, "step": 91 }, { "epoch": 0.736, "grad_norm": 846.2535400390625, "learning_rate": 3.4000000000000005e-06, "loss": 3.8272, "step": 92 }, { "epoch": 0.744, "grad_norm": 104.18599700927734, "learning_rate": 3.44e-06, "loss": 3.6689, "step": 93 }, { "epoch": 0.752, "grad_norm": 89.9948959350586, "learning_rate": 3.4799999999999997e-06, "loss": 3.721, "step": 94 }, { "epoch": 0.76, "grad_norm": 55.44136047363281, "learning_rate": 3.52e-06, "loss": 3.6936, "step": 95 }, { "epoch": 0.768, "grad_norm": 104.52263641357422, "learning_rate": 3.5600000000000002e-06, "loss": 4.1285, "step": 96 }, { "epoch": 0.776, "grad_norm": 265.9507141113281, "learning_rate": 3.6e-06, "loss": 3.4785, "step": 97 }, { "epoch": 0.784, "grad_norm": 90.54768371582031, "learning_rate": 3.6400000000000003e-06, "loss": 3.4104, "step": 98 }, { "epoch": 0.792, "grad_norm": 1598.3504638671875, "learning_rate": 3.68e-06, "loss": 3.7433, "step": 99 }, { "epoch": 0.8, "grad_norm": 193.03836059570312, "learning_rate": 3.72e-06, "loss": 4.0749, "step": 100 }, { "epoch": 0.808, "grad_norm": 637.2760009765625, "learning_rate": 3.7600000000000004e-06, "loss": 3.9205, "step": 101 }, { "epoch": 0.816, "grad_norm": 42.667396545410156, "learning_rate": 3.8e-06, "loss": 3.7892, "step": 102 }, { "epoch": 0.824, "grad_norm": 284.16595458984375, "learning_rate": 3.84e-06, "loss": 3.5732, "step": 103 }, { "epoch": 0.832, "grad_norm": 117.10783386230469, "learning_rate": 3.88e-06, "loss": 4.9533, "step": 104 }, { "epoch": 0.84, "grad_norm": 77.18022918701172, "learning_rate": 3.92e-06, "loss": 3.7556, "step": 105 }, { "epoch": 0.848, "grad_norm": 85.88460540771484, "learning_rate": 3.96e-06, "loss": 3.5773, "step": 106 }, { "epoch": 0.856, "grad_norm": 101.45319366455078, "learning_rate": 
4.000000000000001e-06, "loss": 3.5753, "step": 107 }, { "epoch": 0.864, "grad_norm": 159.80442810058594, "learning_rate": 4.04e-06, "loss": 3.6294, "step": 108 }, { "epoch": 0.872, "grad_norm": 32.066551208496094, "learning_rate": 4.080000000000001e-06, "loss": 3.5787, "step": 109 }, { "epoch": 0.88, "grad_norm": 3936.433349609375, "learning_rate": 4.12e-06, "loss": 3.999, "step": 110 }, { "epoch": 0.888, "grad_norm": 79.8310317993164, "learning_rate": 4.16e-06, "loss": 3.2701, "step": 111 }, { "epoch": 0.896, "grad_norm": 47.406776428222656, "learning_rate": 4.2000000000000004e-06, "loss": 2.7967, "step": 112 }, { "epoch": 0.904, "grad_norm": 62.74674987792969, "learning_rate": 4.24e-06, "loss": 3.7586, "step": 113 }, { "epoch": 0.912, "grad_norm": 201.48736572265625, "learning_rate": 4.28e-06, "loss": 3.325, "step": 114 }, { "epoch": 0.92, "grad_norm": 111.7520523071289, "learning_rate": 4.32e-06, "loss": 3.2537, "step": 115 }, { "epoch": 0.928, "grad_norm": 125.17987823486328, "learning_rate": 4.360000000000001e-06, "loss": 3.3292, "step": 116 }, { "epoch": 0.936, "grad_norm": 448.0842590332031, "learning_rate": 4.4e-06, "loss": 3.483, "step": 117 }, { "epoch": 0.944, "grad_norm": 85.89250183105469, "learning_rate": 4.440000000000001e-06, "loss": 3.8747, "step": 118 }, { "epoch": 0.952, "grad_norm": 650.0930786132812, "learning_rate": 4.48e-06, "loss": 3.3595, "step": 119 }, { "epoch": 0.96, "grad_norm": 84.09048461914062, "learning_rate": 4.52e-06, "loss": 3.1434, "step": 120 }, { "epoch": 0.968, "grad_norm": 793.4928588867188, "learning_rate": 4.56e-06, "loss": 3.6641, "step": 121 }, { "epoch": 0.976, "grad_norm": 182.6153564453125, "learning_rate": 4.6e-06, "loss": 4.1812, "step": 122 }, { "epoch": 0.984, "grad_norm": 331.9185791015625, "learning_rate": 4.64e-06, "loss": 3.4885, "step": 123 }, { "epoch": 0.992, "grad_norm": 95.19418334960938, "learning_rate": 4.68e-06, "loss": 3.5326, "step": 124 }, { "epoch": 1.0, "grad_norm": 222.72003173828125, 
"learning_rate": 4.72e-06, "loss": 3.1463, "step": 125 }, { "epoch": 1.0, "eval_loss": 3.5120596885681152, "eval_map": 0.0031, "eval_map_50": 0.0061, "eval_map_75": 0.0026, "eval_map_Coverall": 0.0115, "eval_map_Face_Shield": 0.0005, "eval_map_Gloves": 0.0, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0036, "eval_map_large": 0.0118, "eval_map_medium": 0.0009, "eval_map_small": 0.0001, "eval_mar_1": 0.0182, "eval_mar_10": 0.0287, "eval_mar_100": 0.0597, "eval_mar_100_Coverall": 0.0467, "eval_mar_100_Face_Shield": 0.1059, "eval_mar_100_Gloves": 0.0, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.1462, "eval_mar_large": 0.1393, "eval_mar_medium": 0.0636, "eval_mar_small": 0.0039, "eval_runtime": 0.9275, "eval_samples_per_second": 31.267, "eval_steps_per_second": 2.156, "step": 125 }, { "epoch": 1.008, "grad_norm": 84.70052337646484, "learning_rate": 4.76e-06, "loss": 3.6251, "step": 126 }, { "epoch": 1.016, "grad_norm": 125.06646728515625, "learning_rate": 4.800000000000001e-06, "loss": 3.6648, "step": 127 }, { "epoch": 1.024, "grad_norm": 372.2067565917969, "learning_rate": 4.84e-06, "loss": 4.0376, "step": 128 }, { "epoch": 1.032, "grad_norm": 116.4840087890625, "learning_rate": 4.880000000000001e-06, "loss": 3.7012, "step": 129 }, { "epoch": 1.04, "grad_norm": 80.98541259765625, "learning_rate": 4.92e-06, "loss": 3.3051, "step": 130 }, { "epoch": 1.048, "grad_norm": 116.7375717163086, "learning_rate": 4.96e-06, "loss": 3.5677, "step": 131 }, { "epoch": 1.056, "grad_norm": 197.94955444335938, "learning_rate": 5e-06, "loss": 3.8929, "step": 132 }, { "epoch": 1.064, "grad_norm": 109.25666809082031, "learning_rate": 5.04e-06, "loss": 2.9213, "step": 133 }, { "epoch": 1.072, "grad_norm": 86.17569732666016, "learning_rate": 5.08e-06, "loss": 3.513, "step": 134 }, { "epoch": 1.08, "grad_norm": 85.52006530761719, "learning_rate": 5.12e-06, "loss": 3.8417, "step": 135 }, { "epoch": 1.088, "grad_norm": 115.45555114746094, "learning_rate": 5.1600000000000006e-06, "loss": 
3.3746, "step": 136 }, { "epoch": 1.096, "grad_norm": 80.0805435180664, "learning_rate": 5.2e-06, "loss": 3.44, "step": 137 }, { "epoch": 1.104, "grad_norm": 261.8367614746094, "learning_rate": 5.240000000000001e-06, "loss": 3.6412, "step": 138 }, { "epoch": 1.112, "grad_norm": 165.19970703125, "learning_rate": 5.28e-06, "loss": 3.5373, "step": 139 }, { "epoch": 1.12, "grad_norm": 126.93994903564453, "learning_rate": 5.32e-06, "loss": 2.8087, "step": 140 }, { "epoch": 1.1280000000000001, "grad_norm": 89.39124298095703, "learning_rate": 5.36e-06, "loss": 3.0533, "step": 141 }, { "epoch": 1.1360000000000001, "grad_norm": 87.90418243408203, "learning_rate": 5.4e-06, "loss": 3.5022, "step": 142 }, { "epoch": 1.144, "grad_norm": 102.79830169677734, "learning_rate": 5.44e-06, "loss": 3.288, "step": 143 }, { "epoch": 1.152, "grad_norm": 105.15153503417969, "learning_rate": 5.48e-06, "loss": 3.0131, "step": 144 }, { "epoch": 1.16, "grad_norm": 138.8660125732422, "learning_rate": 5.5200000000000005e-06, "loss": 3.6495, "step": 145 }, { "epoch": 1.168, "grad_norm": 298.7010803222656, "learning_rate": 5.56e-06, "loss": 3.1893, "step": 146 }, { "epoch": 1.176, "grad_norm": 86.14437866210938, "learning_rate": 5.600000000000001e-06, "loss": 3.7527, "step": 147 }, { "epoch": 1.184, "grad_norm": 338.2149353027344, "learning_rate": 5.64e-06, "loss": 2.9979, "step": 148 }, { "epoch": 1.192, "grad_norm": 275.2916259765625, "learning_rate": 5.680000000000001e-06, "loss": 3.0881, "step": 149 }, { "epoch": 1.2, "grad_norm": 78.89794158935547, "learning_rate": 5.72e-06, "loss": 3.3139, "step": 150 }, { "epoch": 1.208, "grad_norm": 131.74676513671875, "learning_rate": 5.76e-06, "loss": 3.0943, "step": 151 }, { "epoch": 1.216, "grad_norm": 38.0937385559082, "learning_rate": 5.8e-06, "loss": 3.5883, "step": 152 }, { "epoch": 1.224, "grad_norm": 100.68189239501953, "learning_rate": 5.84e-06, "loss": 2.8585, "step": 153 }, { "epoch": 1.232, "grad_norm": 136.72055053710938, "learning_rate": 
5.8800000000000005e-06, "loss": 3.4184, "step": 154 }, { "epoch": 1.24, "grad_norm": 78.2752685546875, "learning_rate": 5.920000000000001e-06, "loss": 3.3572, "step": 155 }, { "epoch": 1.248, "grad_norm": 72.71641540527344, "learning_rate": 5.9600000000000005e-06, "loss": 3.4514, "step": 156 }, { "epoch": 1.256, "grad_norm": 115.14334106445312, "learning_rate": 6e-06, "loss": 2.829, "step": 157 }, { "epoch": 1.264, "grad_norm": 57.92106628417969, "learning_rate": 6.040000000000001e-06, "loss": 3.202, "step": 158 }, { "epoch": 1.272, "grad_norm": 80.06271362304688, "learning_rate": 6.08e-06, "loss": 3.3225, "step": 159 }, { "epoch": 1.28, "grad_norm": 41.85452651977539, "learning_rate": 6.12e-06, "loss": 3.0887, "step": 160 }, { "epoch": 1.288, "grad_norm": 88.06126403808594, "learning_rate": 6.16e-06, "loss": 2.8341, "step": 161 }, { "epoch": 1.296, "grad_norm": 96.79623413085938, "learning_rate": 6.2e-06, "loss": 3.5681, "step": 162 }, { "epoch": 1.304, "grad_norm": 375.60693359375, "learning_rate": 6.24e-06, "loss": 2.9837, "step": 163 }, { "epoch": 1.312, "grad_norm": 657.2738647460938, "learning_rate": 6.28e-06, "loss": 3.1551, "step": 164 }, { "epoch": 1.32, "grad_norm": 155.08616638183594, "learning_rate": 6.320000000000001e-06, "loss": 3.327, "step": 165 }, { "epoch": 1.328, "grad_norm": 261.8237609863281, "learning_rate": 6.360000000000001e-06, "loss": 3.0741, "step": 166 }, { "epoch": 1.336, "grad_norm": 108.08387756347656, "learning_rate": 6.4000000000000006e-06, "loss": 3.1671, "step": 167 }, { "epoch": 1.3439999999999999, "grad_norm": 134.01795959472656, "learning_rate": 6.44e-06, "loss": 2.8741, "step": 168 }, { "epoch": 1.3519999999999999, "grad_norm": 64.47674560546875, "learning_rate": 6.48e-06, "loss": 3.1561, "step": 169 }, { "epoch": 1.3599999999999999, "grad_norm": 83.58087921142578, "learning_rate": 6.519999999999999e-06, "loss": 3.1843, "step": 170 }, { "epoch": 1.3679999999999999, "grad_norm": 67.0985107421875, "learning_rate": 
6.560000000000001e-06, "loss": 2.9098, "step": 171 }, { "epoch": 1.376, "grad_norm": 62.42311096191406, "learning_rate": 6.6e-06, "loss": 3.0144, "step": 172 }, { "epoch": 1.384, "grad_norm": 94.13452911376953, "learning_rate": 6.640000000000001e-06, "loss": 2.8587, "step": 173 }, { "epoch": 1.392, "grad_norm": 340.2109375, "learning_rate": 6.68e-06, "loss": 2.9862, "step": 174 }, { "epoch": 1.4, "grad_norm": 102.3532943725586, "learning_rate": 6.72e-06, "loss": 3.1799, "step": 175 }, { "epoch": 1.408, "grad_norm": 133.93849182128906, "learning_rate": 6.76e-06, "loss": 2.7653, "step": 176 }, { "epoch": 1.416, "grad_norm": 224.46104431152344, "learning_rate": 6.800000000000001e-06, "loss": 4.0781, "step": 177 }, { "epoch": 1.424, "grad_norm": 64.05711364746094, "learning_rate": 6.840000000000001e-06, "loss": 3.1631, "step": 178 }, { "epoch": 1.432, "grad_norm": 82.06625366210938, "learning_rate": 6.88e-06, "loss": 3.0375, "step": 179 }, { "epoch": 1.44, "grad_norm": 71.01380920410156, "learning_rate": 6.92e-06, "loss": 2.7628, "step": 180 }, { "epoch": 1.448, "grad_norm": 81.0610580444336, "learning_rate": 6.9599999999999994e-06, "loss": 3.0142, "step": 181 }, { "epoch": 1.456, "grad_norm": 97.06727600097656, "learning_rate": 7.000000000000001e-06, "loss": 2.8979, "step": 182 }, { "epoch": 1.464, "grad_norm": 84.16802978515625, "learning_rate": 7.04e-06, "loss": 2.9088, "step": 183 }, { "epoch": 1.472, "grad_norm": 163.4725341796875, "learning_rate": 7.080000000000001e-06, "loss": 2.8773, "step": 184 }, { "epoch": 1.48, "grad_norm": 44.490196228027344, "learning_rate": 7.1200000000000004e-06, "loss": 3.0589, "step": 185 }, { "epoch": 1.488, "grad_norm": 88.4025650024414, "learning_rate": 7.16e-06, "loss": 3.0106, "step": 186 }, { "epoch": 1.496, "grad_norm": 80.32359313964844, "learning_rate": 7.2e-06, "loss": 3.0629, "step": 187 }, { "epoch": 1.504, "grad_norm": 293.8106994628906, "learning_rate": 7.240000000000001e-06, "loss": 2.5767, "step": 188 }, { "epoch": 
1.512, "grad_norm": 197.84689331054688, "learning_rate": 7.280000000000001e-06, "loss": 3.2491, "step": 189 }, { "epoch": 1.52, "grad_norm": 140.30162048339844, "learning_rate": 7.32e-06, "loss": 2.7543, "step": 190 }, { "epoch": 1.528, "grad_norm": 415.7967834472656, "learning_rate": 7.36e-06, "loss": 3.2925, "step": 191 }, { "epoch": 1.536, "grad_norm": 99.22180938720703, "learning_rate": 7.4e-06, "loss": 3.1569, "step": 192 }, { "epoch": 1.544, "grad_norm": 175.50131225585938, "learning_rate": 7.44e-06, "loss": 2.5208, "step": 193 }, { "epoch": 1.552, "grad_norm": 68.04039001464844, "learning_rate": 7.480000000000001e-06, "loss": 3.0091, "step": 194 }, { "epoch": 1.56, "grad_norm": 287.7083740234375, "learning_rate": 7.520000000000001e-06, "loss": 2.8424, "step": 195 }, { "epoch": 1.568, "grad_norm": 142.14987182617188, "learning_rate": 7.5600000000000005e-06, "loss": 3.0577, "step": 196 }, { "epoch": 1.576, "grad_norm": 62.20249938964844, "learning_rate": 7.6e-06, "loss": 3.3671, "step": 197 }, { "epoch": 1.584, "grad_norm": 210.31344604492188, "learning_rate": 7.64e-06, "loss": 2.8066, "step": 198 }, { "epoch": 1.592, "grad_norm": 87.8990249633789, "learning_rate": 7.68e-06, "loss": 2.4688, "step": 199 }, { "epoch": 1.6, "grad_norm": 154.13255310058594, "learning_rate": 7.72e-06, "loss": 2.7702, "step": 200 }, { "epoch": 1.608, "grad_norm": 229.9895782470703, "learning_rate": 7.76e-06, "loss": 2.7469, "step": 201 }, { "epoch": 1.616, "grad_norm": 62.72343444824219, "learning_rate": 7.8e-06, "loss": 3.1137, "step": 202 }, { "epoch": 1.624, "grad_norm": 203.27638244628906, "learning_rate": 7.84e-06, "loss": 3.1863, "step": 203 }, { "epoch": 1.6320000000000001, "grad_norm": 62.636661529541016, "learning_rate": 7.879999999999999e-06, "loss": 3.5493, "step": 204 }, { "epoch": 1.6400000000000001, "grad_norm": 250.78062438964844, "learning_rate": 7.92e-06, "loss": 2.8948, "step": 205 }, { "epoch": 1.6480000000000001, "grad_norm": 59.534488677978516, "learning_rate": 
7.96e-06, "loss": 2.7225, "step": 206 }, { "epoch": 1.6560000000000001, "grad_norm": 142.8958282470703, "learning_rate": 8.000000000000001e-06, "loss": 2.9006, "step": 207 }, { "epoch": 1.6640000000000001, "grad_norm": 108.42957305908203, "learning_rate": 8.040000000000001e-06, "loss": 2.9629, "step": 208 }, { "epoch": 1.6720000000000002, "grad_norm": 99.86851501464844, "learning_rate": 8.08e-06, "loss": 3.0656, "step": 209 }, { "epoch": 1.6800000000000002, "grad_norm": 438.7377014160156, "learning_rate": 8.12e-06, "loss": 2.3551, "step": 210 }, { "epoch": 1.688, "grad_norm": 89.35552978515625, "learning_rate": 8.160000000000001e-06, "loss": 2.9974, "step": 211 }, { "epoch": 1.696, "grad_norm": 154.79946899414062, "learning_rate": 8.200000000000001e-06, "loss": 2.4536, "step": 212 }, { "epoch": 1.704, "grad_norm": 80.24996948242188, "learning_rate": 8.24e-06, "loss": 3.0179, "step": 213 }, { "epoch": 1.712, "grad_norm": 71.15071868896484, "learning_rate": 8.28e-06, "loss": 2.8063, "step": 214 }, { "epoch": 1.72, "grad_norm": 255.00489807128906, "learning_rate": 8.32e-06, "loss": 2.5773, "step": 215 }, { "epoch": 1.728, "grad_norm": 77.42022705078125, "learning_rate": 8.36e-06, "loss": 2.8133, "step": 216 }, { "epoch": 1.736, "grad_norm": 46.29115295410156, "learning_rate": 8.400000000000001e-06, "loss": 2.7757, "step": 217 }, { "epoch": 1.744, "grad_norm": 45.172096252441406, "learning_rate": 8.44e-06, "loss": 2.7651, "step": 218 }, { "epoch": 1.752, "grad_norm": 80.22183227539062, "learning_rate": 8.48e-06, "loss": 2.8984, "step": 219 }, { "epoch": 1.76, "grad_norm": 54.384525299072266, "learning_rate": 8.52e-06, "loss": 2.8317, "step": 220 }, { "epoch": 1.768, "grad_norm": 68.62065887451172, "learning_rate": 8.56e-06, "loss": 2.8191, "step": 221 }, { "epoch": 1.776, "grad_norm": 105.66487121582031, "learning_rate": 8.599999999999999e-06, "loss": 2.8685, "step": 222 }, { "epoch": 1.784, "grad_norm": 110.55906677246094, "learning_rate": 8.64e-06, "loss": 2.2179, 
"step": 223 }, { "epoch": 1.792, "grad_norm": 314.27777099609375, "learning_rate": 8.68e-06, "loss": 2.8909, "step": 224 }, { "epoch": 1.8, "grad_norm": 77.14180755615234, "learning_rate": 8.720000000000001e-06, "loss": 2.2191, "step": 225 }, { "epoch": 1.808, "grad_norm": 67.75762939453125, "learning_rate": 8.76e-06, "loss": 2.6124, "step": 226 }, { "epoch": 1.8159999999999998, "grad_norm": 2320.4013671875, "learning_rate": 8.8e-06, "loss": 2.3624, "step": 227 }, { "epoch": 1.8239999999999998, "grad_norm": 177.00148010253906, "learning_rate": 8.840000000000002e-06, "loss": 2.7002, "step": 228 }, { "epoch": 1.8319999999999999, "grad_norm": 45.011417388916016, "learning_rate": 8.880000000000001e-06, "loss": 3.0406, "step": 229 }, { "epoch": 1.8399999999999999, "grad_norm": 114.25853729248047, "learning_rate": 8.920000000000001e-06, "loss": 3.0965, "step": 230 }, { "epoch": 1.8479999999999999, "grad_norm": 159.44810485839844, "learning_rate": 8.96e-06, "loss": 3.074, "step": 231 }, { "epoch": 1.8559999999999999, "grad_norm": 52.4694938659668, "learning_rate": 9e-06, "loss": 3.1684, "step": 232 }, { "epoch": 1.8639999999999999, "grad_norm": 72.63941192626953, "learning_rate": 9.04e-06, "loss": 2.5898, "step": 233 }, { "epoch": 1.8719999999999999, "grad_norm": 176.99302673339844, "learning_rate": 9.080000000000001e-06, "loss": 2.7046, "step": 234 }, { "epoch": 1.88, "grad_norm": 151.3446807861328, "learning_rate": 9.12e-06, "loss": 2.4311, "step": 235 }, { "epoch": 1.888, "grad_norm": 79.13739013671875, "learning_rate": 9.16e-06, "loss": 2.7422, "step": 236 }, { "epoch": 1.896, "grad_norm": 34.5649528503418, "learning_rate": 9.2e-06, "loss": 3.1478, "step": 237 }, { "epoch": 1.904, "grad_norm": 104.48634338378906, "learning_rate": 9.24e-06, "loss": 2.6146, "step": 238 }, { "epoch": 1.912, "grad_norm": 117.55387878417969, "learning_rate": 9.28e-06, "loss": 2.5896, "step": 239 }, { "epoch": 1.92, "grad_norm": 72.09014892578125, "learning_rate": 9.32e-06, "loss": 2.8411, 
"step": 240 }, { "epoch": 1.928, "grad_norm": 83.04727172851562, "learning_rate": 9.36e-06, "loss": 2.8821, "step": 241 }, { "epoch": 1.936, "grad_norm": 54.210594177246094, "learning_rate": 9.4e-06, "loss": 3.8369, "step": 242 }, { "epoch": 1.944, "grad_norm": 55.76344299316406, "learning_rate": 9.44e-06, "loss": 3.1735, "step": 243 }, { "epoch": 1.952, "grad_norm": 45.37208557128906, "learning_rate": 9.48e-06, "loss": 2.6989, "step": 244 }, { "epoch": 1.96, "grad_norm": 151.783203125, "learning_rate": 9.52e-06, "loss": 2.726, "step": 245 }, { "epoch": 1.968, "grad_norm": 46.282135009765625, "learning_rate": 9.560000000000002e-06, "loss": 2.8812, "step": 246 }, { "epoch": 1.976, "grad_norm": 89.7010269165039, "learning_rate": 9.600000000000001e-06, "loss": 2.6056, "step": 247 }, { "epoch": 1.984, "grad_norm": 100.45706176757812, "learning_rate": 9.640000000000001e-06, "loss": 2.4098, "step": 248 }, { "epoch": 1.992, "grad_norm": 54.504791259765625, "learning_rate": 9.68e-06, "loss": 2.5005, "step": 249 }, { "epoch": 2.0, "grad_norm": 94.59587860107422, "learning_rate": 9.72e-06, "loss": 2.4884, "step": 250 }, { "epoch": 2.0, "eval_loss": 2.7548532485961914, "eval_map": 0.0169, "eval_map_50": 0.0273, "eval_map_75": 0.0183, "eval_map_Coverall": 0.0681, "eval_map_Face_Shield": 0.0004, "eval_map_Gloves": 0.0004, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0154, "eval_map_large": 0.021, "eval_map_medium": 0.0058, "eval_map_small": 0.0004, "eval_mar_1": 0.037, "eval_mar_10": 0.0862, "eval_mar_100": 0.1284, "eval_mar_100_Coverall": 0.2889, "eval_mar_100_Face_Shield": 0.0882, "eval_mar_100_Gloves": 0.0361, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2288, "eval_mar_large": 0.2175, "eval_mar_medium": 0.087, "eval_mar_small": 0.0105, "eval_runtime": 0.9095, "eval_samples_per_second": 31.885, "eval_steps_per_second": 2.199, "step": 250 }, { "epoch": 2.008, "grad_norm": 51.52938461303711, "learning_rate": 9.760000000000001e-06, "loss": 2.7847, "step": 251 }, { 
"epoch": 2.016, "grad_norm": 74.12057495117188, "learning_rate": 9.800000000000001e-06, "loss": 2.5767, "step": 252 }, { "epoch": 2.024, "grad_norm": 42.784095764160156, "learning_rate": 9.84e-06, "loss": 2.8894, "step": 253 }, { "epoch": 2.032, "grad_norm": 75.74463653564453, "learning_rate": 9.88e-06, "loss": 2.803, "step": 254 }, { "epoch": 2.04, "grad_norm": 41.141422271728516, "learning_rate": 9.92e-06, "loss": 2.7313, "step": 255 }, { "epoch": 2.048, "grad_norm": 274.3045959472656, "learning_rate": 9.96e-06, "loss": 2.602, "step": 256 }, { "epoch": 2.056, "grad_norm": 47.0019645690918, "learning_rate": 1e-05, "loss": 2.7622, "step": 257 }, { "epoch": 2.064, "grad_norm": 59.970123291015625, "learning_rate": 1.004e-05, "loss": 2.4619, "step": 258 }, { "epoch": 2.072, "grad_norm": 92.06230163574219, "learning_rate": 1.008e-05, "loss": 2.5128, "step": 259 }, { "epoch": 2.08, "grad_norm": 130.65170288085938, "learning_rate": 1.012e-05, "loss": 2.2813, "step": 260 }, { "epoch": 2.088, "grad_norm": 209.51669311523438, "learning_rate": 1.016e-05, "loss": 2.5447, "step": 261 }, { "epoch": 2.096, "grad_norm": 88.61373901367188, "learning_rate": 1.02e-05, "loss": 2.4462, "step": 262 }, { "epoch": 2.104, "grad_norm": 44.519046783447266, "learning_rate": 1.024e-05, "loss": 2.5827, "step": 263 }, { "epoch": 2.112, "grad_norm": 35.72035217285156, "learning_rate": 1.0280000000000002e-05, "loss": 2.8189, "step": 264 }, { "epoch": 2.12, "grad_norm": 82.36544799804688, "learning_rate": 1.0320000000000001e-05, "loss": 2.4532, "step": 265 }, { "epoch": 2.128, "grad_norm": 42.10184097290039, "learning_rate": 1.036e-05, "loss": 2.7967, "step": 266 }, { "epoch": 2.136, "grad_norm": 27.73836326599121, "learning_rate": 1.04e-05, "loss": 2.4983, "step": 267 }, { "epoch": 2.144, "grad_norm": 584.4302978515625, "learning_rate": 1.0440000000000002e-05, "loss": 2.5855, "step": 268 }, { "epoch": 2.152, "grad_norm": 66.43081665039062, "learning_rate": 1.0480000000000001e-05, "loss": 2.3695, 
"step": 269 }, { "epoch": 2.16, "grad_norm": 51.37084197998047, "learning_rate": 1.0520000000000001e-05, "loss": 3.149, "step": 270 }, { "epoch": 2.168, "grad_norm": 67.77560424804688, "learning_rate": 1.056e-05, "loss": 2.5453, "step": 271 }, { "epoch": 2.176, "grad_norm": 217.70994567871094, "learning_rate": 1.06e-05, "loss": 3.0655, "step": 272 }, { "epoch": 2.184, "grad_norm": 46.19034957885742, "learning_rate": 1.064e-05, "loss": 3.0025, "step": 273 }, { "epoch": 2.192, "grad_norm": 134.26878356933594, "learning_rate": 1.0680000000000001e-05, "loss": 2.4732, "step": 274 }, { "epoch": 2.2, "grad_norm": 78.79590606689453, "learning_rate": 1.072e-05, "loss": 2.6338, "step": 275 }, { "epoch": 2.208, "grad_norm": 52.01063537597656, "learning_rate": 1.076e-05, "loss": 2.1205, "step": 276 }, { "epoch": 2.216, "grad_norm": 136.5937957763672, "learning_rate": 1.08e-05, "loss": 2.5527, "step": 277 }, { "epoch": 2.224, "grad_norm": 97.16157531738281, "learning_rate": 1.084e-05, "loss": 2.2873, "step": 278 }, { "epoch": 2.232, "grad_norm": 50.401939392089844, "learning_rate": 1.088e-05, "loss": 2.487, "step": 279 }, { "epoch": 2.24, "grad_norm": 61.39751052856445, "learning_rate": 1.092e-05, "loss": 2.3012, "step": 280 }, { "epoch": 2.248, "grad_norm": 73.65274047851562, "learning_rate": 1.096e-05, "loss": 2.1546, "step": 281 }, { "epoch": 2.2560000000000002, "grad_norm": 98.0835952758789, "learning_rate": 1.1000000000000001e-05, "loss": 2.4086, "step": 282 }, { "epoch": 2.2640000000000002, "grad_norm": 60.72865676879883, "learning_rate": 1.1040000000000001e-05, "loss": 2.2801, "step": 283 }, { "epoch": 2.2720000000000002, "grad_norm": 53.5462646484375, "learning_rate": 1.108e-05, "loss": 2.5341, "step": 284 }, { "epoch": 2.2800000000000002, "grad_norm": 44.289730072021484, "learning_rate": 1.112e-05, "loss": 2.4743, "step": 285 }, { "epoch": 2.288, "grad_norm": 77.33063507080078, "learning_rate": 1.1160000000000002e-05, "loss": 2.3163, "step": 286 }, { "epoch": 2.296, 
"grad_norm": 63.59436798095703, "learning_rate": 1.1200000000000001e-05, "loss": 2.7246, "step": 287 }, { "epoch": 2.304, "grad_norm": 48.14683532714844, "learning_rate": 1.124e-05, "loss": 2.337, "step": 288 }, { "epoch": 2.312, "grad_norm": 44.55038070678711, "learning_rate": 1.128e-05, "loss": 2.2908, "step": 289 }, { "epoch": 2.32, "grad_norm": 91.69697570800781, "learning_rate": 1.132e-05, "loss": 2.4908, "step": 290 }, { "epoch": 2.328, "grad_norm": 38.03496170043945, "learning_rate": 1.1360000000000001e-05, "loss": 3.4362, "step": 291 }, { "epoch": 2.336, "grad_norm": 38.595767974853516, "learning_rate": 1.1400000000000001e-05, "loss": 2.318, "step": 292 }, { "epoch": 2.344, "grad_norm": 74.21697235107422, "learning_rate": 1.144e-05, "loss": 2.4179, "step": 293 }, { "epoch": 2.352, "grad_norm": 42.392784118652344, "learning_rate": 1.148e-05, "loss": 2.4681, "step": 294 }, { "epoch": 2.36, "grad_norm": 61.634979248046875, "learning_rate": 1.152e-05, "loss": 2.5406, "step": 295 }, { "epoch": 2.368, "grad_norm": 50.476070404052734, "learning_rate": 1.156e-05, "loss": 2.4414, "step": 296 }, { "epoch": 2.376, "grad_norm": 133.6861572265625, "learning_rate": 1.16e-05, "loss": 2.4318, "step": 297 }, { "epoch": 2.384, "grad_norm": 117.64387512207031, "learning_rate": 1.164e-05, "loss": 2.3087, "step": 298 }, { "epoch": 2.392, "grad_norm": 47.455787658691406, "learning_rate": 1.168e-05, "loss": 2.3415, "step": 299 }, { "epoch": 2.4, "grad_norm": 50.65950393676758, "learning_rate": 1.172e-05, "loss": 2.4852, "step": 300 }, { "epoch": 2.408, "grad_norm": 114.41341400146484, "learning_rate": 1.1760000000000001e-05, "loss": 2.1738, "step": 301 }, { "epoch": 2.416, "grad_norm": 54.930538177490234, "learning_rate": 1.18e-05, "loss": 1.9029, "step": 302 }, { "epoch": 2.424, "grad_norm": 91.74811553955078, "learning_rate": 1.1840000000000002e-05, "loss": 2.8909, "step": 303 }, { "epoch": 2.432, "grad_norm": 68.61322784423828, "learning_rate": 1.1880000000000001e-05, "loss": 
2.2619, "step": 304 }, { "epoch": 2.44, "grad_norm": 108.32382202148438, "learning_rate": 1.1920000000000001e-05, "loss": 2.429, "step": 305 }, { "epoch": 2.448, "grad_norm": 229.13436889648438, "learning_rate": 1.196e-05, "loss": 2.1056, "step": 306 }, { "epoch": 2.456, "grad_norm": 66.59453582763672, "learning_rate": 1.2e-05, "loss": 2.1231, "step": 307 }, { "epoch": 2.464, "grad_norm": 64.43763732910156, "learning_rate": 1.204e-05, "loss": 2.2886, "step": 308 }, { "epoch": 2.472, "grad_norm": 60.82057189941406, "learning_rate": 1.2080000000000001e-05, "loss": 2.0467, "step": 309 }, { "epoch": 2.48, "grad_norm": 79.16918182373047, "learning_rate": 1.2120000000000001e-05, "loss": 2.0382, "step": 310 }, { "epoch": 2.488, "grad_norm": 37.86714172363281, "learning_rate": 1.216e-05, "loss": 2.2343, "step": 311 }, { "epoch": 2.496, "grad_norm": 55.411216735839844, "learning_rate": 1.22e-05, "loss": 2.1652, "step": 312 }, { "epoch": 2.504, "grad_norm": 70.39246368408203, "learning_rate": 1.224e-05, "loss": 2.0799, "step": 313 }, { "epoch": 2.512, "grad_norm": 491.8386535644531, "learning_rate": 1.2280000000000001e-05, "loss": 2.8014, "step": 314 }, { "epoch": 2.52, "grad_norm": 158.88726806640625, "learning_rate": 1.232e-05, "loss": 2.6094, "step": 315 }, { "epoch": 2.528, "grad_norm": 67.58332824707031, "learning_rate": 1.236e-05, "loss": 2.6788, "step": 316 }, { "epoch": 2.536, "grad_norm": 74.94740295410156, "learning_rate": 1.24e-05, "loss": 2.1899, "step": 317 }, { "epoch": 2.544, "grad_norm": 31.24871063232422, "learning_rate": 1.244e-05, "loss": 2.0854, "step": 318 }, { "epoch": 2.552, "grad_norm": 337.5387268066406, "learning_rate": 1.248e-05, "loss": 2.0785, "step": 319 }, { "epoch": 2.56, "grad_norm": 369.1131591796875, "learning_rate": 1.252e-05, "loss": 2.3434, "step": 320 }, { "epoch": 2.568, "grad_norm": 70.43251037597656, "learning_rate": 1.256e-05, "loss": 2.232, "step": 321 }, { "epoch": 2.576, "grad_norm": 53.073974609375, "learning_rate": 
1.2600000000000001e-05, "loss": 1.5944, "step": 322 }, { "epoch": 2.584, "grad_norm": 57.46676254272461, "learning_rate": 1.2640000000000003e-05, "loss": 2.2067, "step": 323 }, { "epoch": 2.592, "grad_norm": 41.12956619262695, "learning_rate": 1.268e-05, "loss": 2.6257, "step": 324 }, { "epoch": 2.6, "grad_norm": 39.85604476928711, "learning_rate": 1.2720000000000002e-05, "loss": 2.4871, "step": 325 }, { "epoch": 2.608, "grad_norm": 42.13130187988281, "learning_rate": 1.276e-05, "loss": 1.8372, "step": 326 }, { "epoch": 2.616, "grad_norm": 61.26386642456055, "learning_rate": 1.2800000000000001e-05, "loss": 1.9118, "step": 327 }, { "epoch": 2.624, "grad_norm": 70.9827880859375, "learning_rate": 1.2839999999999999e-05, "loss": 2.0045, "step": 328 }, { "epoch": 2.632, "grad_norm": 45.39491271972656, "learning_rate": 1.288e-05, "loss": 1.9395, "step": 329 }, { "epoch": 2.64, "grad_norm": 42.04745864868164, "learning_rate": 1.2920000000000002e-05, "loss": 2.1882, "step": 330 }, { "epoch": 2.648, "grad_norm": 123.60689544677734, "learning_rate": 1.296e-05, "loss": 1.7153, "step": 331 }, { "epoch": 2.656, "grad_norm": 36.16223907470703, "learning_rate": 1.3000000000000001e-05, "loss": 1.9609, "step": 332 }, { "epoch": 2.664, "grad_norm": 45.56760787963867, "learning_rate": 1.3039999999999999e-05, "loss": 2.0562, "step": 333 }, { "epoch": 2.672, "grad_norm": 43.99277114868164, "learning_rate": 1.308e-05, "loss": 2.11, "step": 334 }, { "epoch": 2.68, "grad_norm": 55.098690032958984, "learning_rate": 1.3120000000000001e-05, "loss": 2.6705, "step": 335 }, { "epoch": 2.6879999999999997, "grad_norm": 65.23367309570312, "learning_rate": 1.316e-05, "loss": 2.0526, "step": 336 }, { "epoch": 2.6959999999999997, "grad_norm": 41.57449722290039, "learning_rate": 1.32e-05, "loss": 2.2515, "step": 337 }, { "epoch": 2.7039999999999997, "grad_norm": 53.02543640136719, "learning_rate": 1.324e-05, "loss": 2.0637, "step": 338 }, { "epoch": 2.7119999999999997, "grad_norm": 45.33719253540039, 
"learning_rate": 1.3280000000000002e-05, "loss": 2.0137, "step": 339 }, { "epoch": 2.7199999999999998, "grad_norm": 71.6439208984375, "learning_rate": 1.3320000000000001e-05, "loss": 2.0413, "step": 340 }, { "epoch": 2.7279999999999998, "grad_norm": 262.7536315917969, "learning_rate": 1.336e-05, "loss": 2.7257, "step": 341 }, { "epoch": 2.7359999999999998, "grad_norm": 73.61746215820312, "learning_rate": 1.3400000000000002e-05, "loss": 1.7551, "step": 342 }, { "epoch": 2.7439999999999998, "grad_norm": 129.1374053955078, "learning_rate": 1.344e-05, "loss": 2.5132, "step": 343 }, { "epoch": 2.752, "grad_norm": 58.477352142333984, "learning_rate": 1.3480000000000001e-05, "loss": 2.093, "step": 344 }, { "epoch": 2.76, "grad_norm": 72.5787582397461, "learning_rate": 1.352e-05, "loss": 2.3316, "step": 345 }, { "epoch": 2.768, "grad_norm": 36.07229232788086, "learning_rate": 1.356e-05, "loss": 2.538, "step": 346 }, { "epoch": 2.776, "grad_norm": 60.25495529174805, "learning_rate": 1.3600000000000002e-05, "loss": 3.0173, "step": 347 }, { "epoch": 2.784, "grad_norm": 53.548316955566406, "learning_rate": 1.364e-05, "loss": 2.5482, "step": 348 }, { "epoch": 2.792, "grad_norm": 48.00714111328125, "learning_rate": 1.3680000000000001e-05, "loss": 2.0758, "step": 349 }, { "epoch": 2.8, "grad_norm": 30.8994083404541, "learning_rate": 1.3719999999999999e-05, "loss": 2.453, "step": 350 }, { "epoch": 2.808, "grad_norm": 50.33769226074219, "learning_rate": 1.376e-05, "loss": 2.1317, "step": 351 }, { "epoch": 2.816, "grad_norm": 106.04808807373047, "learning_rate": 1.3800000000000002e-05, "loss": 2.548, "step": 352 }, { "epoch": 2.824, "grad_norm": 83.40410614013672, "learning_rate": 1.384e-05, "loss": 1.9834, "step": 353 }, { "epoch": 2.832, "grad_norm": 43.24612045288086, "learning_rate": 1.3880000000000001e-05, "loss": 2.0977, "step": 354 }, { "epoch": 2.84, "grad_norm": 43.37907791137695, "learning_rate": 1.3919999999999999e-05, "loss": 2.2027, "step": 355 }, { "epoch": 2.848, 
"grad_norm": 52.164188385009766, "learning_rate": 1.396e-05, "loss": 2.0784, "step": 356 }, { "epoch": 2.856, "grad_norm": 139.660888671875, "learning_rate": 1.4000000000000001e-05, "loss": 2.5692, "step": 357 }, { "epoch": 2.864, "grad_norm": 212.07412719726562, "learning_rate": 1.4040000000000001e-05, "loss": 1.9617, "step": 358 }, { "epoch": 2.872, "grad_norm": 74.51329040527344, "learning_rate": 1.408e-05, "loss": 1.939, "step": 359 }, { "epoch": 2.88, "grad_norm": 61.94458770751953, "learning_rate": 1.412e-05, "loss": 2.3833, "step": 360 }, { "epoch": 2.888, "grad_norm": 93.32464599609375, "learning_rate": 1.4160000000000002e-05, "loss": 2.0459, "step": 361 }, { "epoch": 2.896, "grad_norm": 42.589237213134766, "learning_rate": 1.42e-05, "loss": 2.3239, "step": 362 }, { "epoch": 2.904, "grad_norm": 56.22827911376953, "learning_rate": 1.4240000000000001e-05, "loss": 1.9598, "step": 363 }, { "epoch": 2.912, "grad_norm": 85.45355987548828, "learning_rate": 1.4280000000000002e-05, "loss": 2.4718, "step": 364 }, { "epoch": 2.92, "grad_norm": 48.44956588745117, "learning_rate": 1.432e-05, "loss": 1.9303, "step": 365 }, { "epoch": 2.928, "grad_norm": 92.14666748046875, "learning_rate": 1.4360000000000001e-05, "loss": 2.2756, "step": 366 }, { "epoch": 2.936, "grad_norm": 91.48953247070312, "learning_rate": 1.44e-05, "loss": 3.3122, "step": 367 }, { "epoch": 2.944, "grad_norm": 65.46638488769531, "learning_rate": 1.444e-05, "loss": 2.7157, "step": 368 }, { "epoch": 2.952, "grad_norm": 71.1252212524414, "learning_rate": 1.4480000000000002e-05, "loss": 2.2059, "step": 369 }, { "epoch": 2.96, "grad_norm": 42.69770431518555, "learning_rate": 1.452e-05, "loss": 1.8286, "step": 370 }, { "epoch": 2.968, "grad_norm": 63.86955261230469, "learning_rate": 1.4560000000000001e-05, "loss": 2.1086, "step": 371 }, { "epoch": 2.976, "grad_norm": 95.89076232910156, "learning_rate": 1.4599999999999999e-05, "loss": 2.6997, "step": 372 }, { "epoch": 2.984, "grad_norm": 23.554903030395508, 
"learning_rate": 1.464e-05, "loss": 1.8257, "step": 373 }, { "epoch": 2.992, "grad_norm": 57.88138961791992, "learning_rate": 1.4680000000000002e-05, "loss": 2.2071, "step": 374 }, { "epoch": 3.0, "grad_norm": 37.864200592041016, "learning_rate": 1.472e-05, "loss": 2.2884, "step": 375 }, { "epoch": 3.0, "eval_loss": 2.3634049892425537, "eval_map": 0.0338, "eval_map_50": 0.0606, "eval_map_75": 0.0293, "eval_map_Coverall": 0.1452, "eval_map_Face_Shield": 0.0009, "eval_map_Gloves": 0.0043, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0188, "eval_map_large": 0.0349, "eval_map_medium": 0.0079, "eval_map_small": 0.0047, "eval_mar_1": 0.0662, "eval_mar_10": 0.1682, "eval_mar_100": 0.2077, "eval_mar_100_Coverall": 0.6533, "eval_mar_100_Face_Shield": 0.0412, "eval_mar_100_Gloves": 0.1246, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2192, "eval_mar_large": 0.2637, "eval_mar_medium": 0.1073, "eval_mar_small": 0.0285, "eval_runtime": 0.9168, "eval_samples_per_second": 31.633, "eval_steps_per_second": 2.182, "step": 375 }, { "epoch": 3.008, "grad_norm": 63.02311706542969, "learning_rate": 1.4760000000000001e-05, "loss": 2.2746, "step": 376 }, { "epoch": 3.016, "grad_norm": 133.93997192382812, "learning_rate": 1.48e-05, "loss": 1.9035, "step": 377 }, { "epoch": 3.024, "grad_norm": 112.5084228515625, "learning_rate": 1.4840000000000002e-05, "loss": 1.9696, "step": 378 }, { "epoch": 3.032, "grad_norm": 153.34812927246094, "learning_rate": 1.488e-05, "loss": 2.1384, "step": 379 }, { "epoch": 3.04, "grad_norm": 67.32705688476562, "learning_rate": 1.4920000000000001e-05, "loss": 2.2409, "step": 380 }, { "epoch": 3.048, "grad_norm": 62.46927261352539, "learning_rate": 1.4960000000000002e-05, "loss": 2.2268, "step": 381 }, { "epoch": 3.056, "grad_norm": 46.22364044189453, "learning_rate": 1.5e-05, "loss": 1.5525, "step": 382 }, { "epoch": 3.064, "grad_norm": 59.13709259033203, "learning_rate": 1.5040000000000002e-05, "loss": 3.4135, "step": 383 }, { "epoch": 3.072, "grad_norm": 
50.634376525878906, "learning_rate": 1.508e-05, "loss": 2.3919, "step": 384 }, { "epoch": 3.08, "grad_norm": 283.5030517578125, "learning_rate": 1.5120000000000001e-05, "loss": 1.9899, "step": 385 }, { "epoch": 3.088, "grad_norm": 95.03755950927734, "learning_rate": 1.5160000000000002e-05, "loss": 2.0636, "step": 386 }, { "epoch": 3.096, "grad_norm": 261.8937683105469, "learning_rate": 1.52e-05, "loss": 2.0435, "step": 387 }, { "epoch": 3.104, "grad_norm": 30.873079299926758, "learning_rate": 1.5240000000000001e-05, "loss": 2.0491, "step": 388 }, { "epoch": 3.112, "grad_norm": 49.87833023071289, "learning_rate": 1.528e-05, "loss": 2.0215, "step": 389 }, { "epoch": 3.12, "grad_norm": 57.44118118286133, "learning_rate": 1.5320000000000002e-05, "loss": 1.8242, "step": 390 }, { "epoch": 3.128, "grad_norm": 194.5751190185547, "learning_rate": 1.536e-05, "loss": 2.2893, "step": 391 }, { "epoch": 3.136, "grad_norm": 51.35193634033203, "learning_rate": 1.54e-05, "loss": 2.3134, "step": 392 }, { "epoch": 3.144, "grad_norm": 80.30274200439453, "learning_rate": 1.544e-05, "loss": 2.0996, "step": 393 }, { "epoch": 3.152, "grad_norm": 56.29761505126953, "learning_rate": 1.548e-05, "loss": 2.015, "step": 394 }, { "epoch": 3.16, "grad_norm": 51.408714294433594, "learning_rate": 1.552e-05, "loss": 2.096, "step": 395 }, { "epoch": 3.168, "grad_norm": 40.54011917114258, "learning_rate": 1.556e-05, "loss": 2.5388, "step": 396 }, { "epoch": 3.176, "grad_norm": 204.88377380371094, "learning_rate": 1.56e-05, "loss": 2.2609, "step": 397 }, { "epoch": 3.184, "grad_norm": 49.389766693115234, "learning_rate": 1.5640000000000003e-05, "loss": 1.893, "step": 398 }, { "epoch": 3.192, "grad_norm": 36.26692199707031, "learning_rate": 1.568e-05, "loss": 2.8899, "step": 399 }, { "epoch": 3.2, "grad_norm": 43.269561767578125, "learning_rate": 1.5720000000000002e-05, "loss": 1.9465, "step": 400 }, { "epoch": 3.208, "grad_norm": 49.37830352783203, "learning_rate": 1.5759999999999998e-05, "loss": 
2.2632, "step": 401 }, { "epoch": 3.216, "grad_norm": 75.28233337402344, "learning_rate": 1.58e-05, "loss": 1.6036, "step": 402 }, { "epoch": 3.224, "grad_norm": 97.49056243896484, "learning_rate": 1.584e-05, "loss": 1.8692, "step": 403 }, { "epoch": 3.232, "grad_norm": 47.81147384643555, "learning_rate": 1.588e-05, "loss": 2.0688, "step": 404 }, { "epoch": 3.24, "grad_norm": 63.02088928222656, "learning_rate": 1.592e-05, "loss": 2.0167, "step": 405 }, { "epoch": 3.248, "grad_norm": 54.76719284057617, "learning_rate": 1.596e-05, "loss": 1.659, "step": 406 }, { "epoch": 3.2560000000000002, "grad_norm": 67.61274719238281, "learning_rate": 1.6000000000000003e-05, "loss": 2.3324, "step": 407 }, { "epoch": 3.2640000000000002, "grad_norm": 61.03977966308594, "learning_rate": 1.604e-05, "loss": 1.7128, "step": 408 }, { "epoch": 3.2720000000000002, "grad_norm": 59.36174774169922, "learning_rate": 1.6080000000000002e-05, "loss": 1.9622, "step": 409 }, { "epoch": 3.2800000000000002, "grad_norm": 53.25993728637695, "learning_rate": 1.612e-05, "loss": 2.0648, "step": 410 }, { "epoch": 3.288, "grad_norm": 167.18377685546875, "learning_rate": 1.616e-05, "loss": 2.42, "step": 411 }, { "epoch": 3.296, "grad_norm": 124.80193328857422, "learning_rate": 1.62e-05, "loss": 2.2724, "step": 412 }, { "epoch": 3.304, "grad_norm": 45.94084930419922, "learning_rate": 1.624e-05, "loss": 2.1649, "step": 413 }, { "epoch": 3.312, "grad_norm": 39.99915313720703, "learning_rate": 1.628e-05, "loss": 1.9726, "step": 414 }, { "epoch": 3.32, "grad_norm": 129.72503662109375, "learning_rate": 1.6320000000000003e-05, "loss": 2.0855, "step": 415 }, { "epoch": 3.328, "grad_norm": 309.35986328125, "learning_rate": 1.636e-05, "loss": 2.0507, "step": 416 }, { "epoch": 3.336, "grad_norm": 79.27793884277344, "learning_rate": 1.6400000000000002e-05, "loss": 2.1169, "step": 417 }, { "epoch": 3.344, "grad_norm": 51.274105072021484, "learning_rate": 1.644e-05, "loss": 1.7297, "step": 418 }, { "epoch": 3.352, 
"grad_norm": 100.9406967163086, "learning_rate": 1.648e-05, "loss": 1.9511, "step": 419 }, { "epoch": 3.36, "grad_norm": 65.0149154663086, "learning_rate": 1.652e-05, "loss": 2.1214, "step": 420 }, { "epoch": 3.368, "grad_norm": 68.61194610595703, "learning_rate": 1.656e-05, "loss": 2.4385, "step": 421 }, { "epoch": 3.376, "grad_norm": 42.565521240234375, "learning_rate": 1.66e-05, "loss": 2.0304, "step": 422 }, { "epoch": 3.384, "grad_norm": 75.29212951660156, "learning_rate": 1.664e-05, "loss": 2.145, "step": 423 }, { "epoch": 3.392, "grad_norm": 236.9296417236328, "learning_rate": 1.668e-05, "loss": 3.1836, "step": 424 }, { "epoch": 3.4, "grad_norm": 69.20806884765625, "learning_rate": 1.672e-05, "loss": 1.6874, "step": 425 }, { "epoch": 3.408, "grad_norm": 55.23190689086914, "learning_rate": 1.6760000000000002e-05, "loss": 1.8663, "step": 426 }, { "epoch": 3.416, "grad_norm": 30.767248153686523, "learning_rate": 1.6800000000000002e-05, "loss": 1.9932, "step": 427 }, { "epoch": 3.424, "grad_norm": 46.00278854370117, "learning_rate": 1.684e-05, "loss": 1.9403, "step": 428 }, { "epoch": 3.432, "grad_norm": 57.33998107910156, "learning_rate": 1.688e-05, "loss": 2.2381, "step": 429 }, { "epoch": 3.44, "grad_norm": 99.2421646118164, "learning_rate": 1.692e-05, "loss": 2.186, "step": 430 }, { "epoch": 3.448, "grad_norm": 50.2891960144043, "learning_rate": 1.696e-05, "loss": 2.4046, "step": 431 }, { "epoch": 3.456, "grad_norm": 30.263507843017578, "learning_rate": 1.7000000000000003e-05, "loss": 2.3384, "step": 432 }, { "epoch": 3.464, "grad_norm": 47.434776306152344, "learning_rate": 1.704e-05, "loss": 2.1365, "step": 433 }, { "epoch": 3.472, "grad_norm": 45.71773147583008, "learning_rate": 1.7080000000000002e-05, "loss": 2.1208, "step": 434 }, { "epoch": 3.48, "grad_norm": 66.17249298095703, "learning_rate": 1.712e-05, "loss": 2.0449, "step": 435 }, { "epoch": 3.488, "grad_norm": 48.117000579833984, "learning_rate": 1.7160000000000002e-05, "loss": 1.8566, "step": 436 
}, { "epoch": 3.496, "grad_norm": 61.332130432128906, "learning_rate": 1.7199999999999998e-05, "loss": 1.9047, "step": 437 }, { "epoch": 3.504, "grad_norm": 31.802583694458008, "learning_rate": 1.724e-05, "loss": 2.5926, "step": 438 }, { "epoch": 3.512, "grad_norm": 262.84820556640625, "learning_rate": 1.728e-05, "loss": 2.7515, "step": 439 }, { "epoch": 3.52, "grad_norm": 42.373619079589844, "learning_rate": 1.732e-05, "loss": 2.0422, "step": 440 }, { "epoch": 3.528, "grad_norm": 63.514766693115234, "learning_rate": 1.736e-05, "loss": 1.6295, "step": 441 }, { "epoch": 3.536, "grad_norm": 101.97686004638672, "learning_rate": 1.74e-05, "loss": 1.7955, "step": 442 }, { "epoch": 3.544, "grad_norm": 32.1984977722168, "learning_rate": 1.7440000000000002e-05, "loss": 2.0901, "step": 443 }, { "epoch": 3.552, "grad_norm": 145.59686279296875, "learning_rate": 1.7480000000000002e-05, "loss": 2.2548, "step": 444 }, { "epoch": 3.56, "grad_norm": 68.91549682617188, "learning_rate": 1.752e-05, "loss": 1.7848, "step": 445 }, { "epoch": 3.568, "grad_norm": 169.77870178222656, "learning_rate": 1.756e-05, "loss": 1.6524, "step": 446 }, { "epoch": 3.576, "grad_norm": 90.98660278320312, "learning_rate": 1.76e-05, "loss": 1.8797, "step": 447 }, { "epoch": 3.584, "grad_norm": 55.57237243652344, "learning_rate": 1.764e-05, "loss": 1.7558, "step": 448 }, { "epoch": 3.592, "grad_norm": 34.127689361572266, "learning_rate": 1.7680000000000004e-05, "loss": 1.9119, "step": 449 }, { "epoch": 3.6, "grad_norm": 179.04283142089844, "learning_rate": 1.772e-05, "loss": 1.857, "step": 450 }, { "epoch": 3.608, "grad_norm": 98.45733642578125, "learning_rate": 1.7760000000000003e-05, "loss": 1.7802, "step": 451 }, { "epoch": 3.616, "grad_norm": 92.62533569335938, "learning_rate": 1.78e-05, "loss": 2.6863, "step": 452 }, { "epoch": 3.624, "grad_norm": 322.2291259765625, "learning_rate": 1.7840000000000002e-05, "loss": 1.6168, "step": 453 }, { "epoch": 3.632, "grad_norm": 60.06715774536133, 
"learning_rate": 1.7879999999999998e-05, "loss": 1.6835, "step": 454 }, { "epoch": 3.64, "grad_norm": 45.61931228637695, "learning_rate": 1.792e-05, "loss": 1.8763, "step": 455 }, { "epoch": 3.648, "grad_norm": 134.01707458496094, "learning_rate": 1.796e-05, "loss": 1.958, "step": 456 }, { "epoch": 3.656, "grad_norm": 122.758056640625, "learning_rate": 1.8e-05, "loss": 1.8968, "step": 457 }, { "epoch": 3.664, "grad_norm": 72.61627960205078, "learning_rate": 1.804e-05, "loss": 2.304, "step": 458 }, { "epoch": 3.672, "grad_norm": 80.7463607788086, "learning_rate": 1.808e-05, "loss": 1.7044, "step": 459 }, { "epoch": 3.68, "grad_norm": 140.1997833251953, "learning_rate": 1.812e-05, "loss": 1.9983, "step": 460 }, { "epoch": 3.6879999999999997, "grad_norm": 81.81644439697266, "learning_rate": 1.8160000000000002e-05, "loss": 2.3777, "step": 461 }, { "epoch": 3.6959999999999997, "grad_norm": 35.85359573364258, "learning_rate": 1.8200000000000002e-05, "loss": 2.412, "step": 462 }, { "epoch": 3.7039999999999997, "grad_norm": 109.52523803710938, "learning_rate": 1.824e-05, "loss": 2.2597, "step": 463 }, { "epoch": 3.7119999999999997, "grad_norm": 137.60081481933594, "learning_rate": 1.828e-05, "loss": 2.0792, "step": 464 }, { "epoch": 3.7199999999999998, "grad_norm": 41.27918243408203, "learning_rate": 1.832e-05, "loss": 2.7979, "step": 465 }, { "epoch": 3.7279999999999998, "grad_norm": 135.56109619140625, "learning_rate": 1.8360000000000004e-05, "loss": 1.6405, "step": 466 }, { "epoch": 3.7359999999999998, "grad_norm": 112.85850524902344, "learning_rate": 1.84e-05, "loss": 1.9073, "step": 467 }, { "epoch": 3.7439999999999998, "grad_norm": 36.69434356689453, "learning_rate": 1.8440000000000003e-05, "loss": 2.8433, "step": 468 }, { "epoch": 3.752, "grad_norm": 344.5074462890625, "learning_rate": 1.848e-05, "loss": 1.8117, "step": 469 }, { "epoch": 3.76, "grad_norm": 241.0221710205078, "learning_rate": 1.8520000000000002e-05, "loss": 2.0641, "step": 470 }, { "epoch": 3.768, 
"grad_norm": 204.78070068359375, "learning_rate": 1.856e-05, "loss": 1.8411, "step": 471 }, { "epoch": 3.776, "grad_norm": 202.7001190185547, "learning_rate": 1.86e-05, "loss": 2.2207, "step": 472 }, { "epoch": 3.784, "grad_norm": 74.59146881103516, "learning_rate": 1.864e-05, "loss": 1.9543, "step": 473 }, { "epoch": 3.792, "grad_norm": 36.93360900878906, "learning_rate": 1.868e-05, "loss": 1.885, "step": 474 }, { "epoch": 3.8, "grad_norm": 117.50788116455078, "learning_rate": 1.872e-05, "loss": 1.7961, "step": 475 }, { "epoch": 3.808, "grad_norm": 43.787147521972656, "learning_rate": 1.876e-05, "loss": 1.626, "step": 476 }, { "epoch": 3.816, "grad_norm": 53.123355865478516, "learning_rate": 1.88e-05, "loss": 2.2446, "step": 477 }, { "epoch": 3.824, "grad_norm": 104.37947845458984, "learning_rate": 1.8840000000000003e-05, "loss": 2.1325, "step": 478 }, { "epoch": 3.832, "grad_norm": 57.664676666259766, "learning_rate": 1.888e-05, "loss": 1.9481, "step": 479 }, { "epoch": 3.84, "grad_norm": 46.17082977294922, "learning_rate": 1.8920000000000002e-05, "loss": 1.7929, "step": 480 }, { "epoch": 3.848, "grad_norm": 95.28953552246094, "learning_rate": 1.896e-05, "loss": 2.3943, "step": 481 }, { "epoch": 3.856, "grad_norm": 50.82599639892578, "learning_rate": 1.9e-05, "loss": 1.9738, "step": 482 }, { "epoch": 3.864, "grad_norm": 31.506122589111328, "learning_rate": 1.904e-05, "loss": 2.3322, "step": 483 }, { "epoch": 3.872, "grad_norm": 73.5311279296875, "learning_rate": 1.908e-05, "loss": 2.2775, "step": 484 }, { "epoch": 3.88, "grad_norm": 51.88594055175781, "learning_rate": 1.9120000000000003e-05, "loss": 1.6442, "step": 485 }, { "epoch": 3.888, "grad_norm": 129.4317626953125, "learning_rate": 1.916e-05, "loss": 1.9133, "step": 486 }, { "epoch": 3.896, "grad_norm": 73.22756958007812, "learning_rate": 1.9200000000000003e-05, "loss": 1.9811, "step": 487 }, { "epoch": 3.904, "grad_norm": 106.6075439453125, "learning_rate": 1.924e-05, "loss": 1.87, "step": 488 }, { 
"epoch": 3.912, "grad_norm": 93.94361877441406, "learning_rate": 1.9280000000000002e-05, "loss": 1.8966, "step": 489 }, { "epoch": 3.92, "grad_norm": 37.47288131713867, "learning_rate": 1.932e-05, "loss": 1.6729, "step": 490 }, { "epoch": 3.928, "grad_norm": 32.17375183105469, "learning_rate": 1.936e-05, "loss": 2.1113, "step": 491 }, { "epoch": 3.936, "grad_norm": 49.617435455322266, "learning_rate": 1.94e-05, "loss": 3.0428, "step": 492 }, { "epoch": 3.944, "grad_norm": 63.207706451416016, "learning_rate": 1.944e-05, "loss": 2.3749, "step": 493 }, { "epoch": 3.952, "grad_norm": 99.62252807617188, "learning_rate": 1.948e-05, "loss": 2.1169, "step": 494 }, { "epoch": 3.96, "grad_norm": 45.129154205322266, "learning_rate": 1.9520000000000003e-05, "loss": 1.9197, "step": 495 }, { "epoch": 3.968, "grad_norm": 235.99624633789062, "learning_rate": 1.956e-05, "loss": 1.9082, "step": 496 }, { "epoch": 3.976, "grad_norm": 89.82752227783203, "learning_rate": 1.9600000000000002e-05, "loss": 2.5579, "step": 497 }, { "epoch": 3.984, "grad_norm": 36.550086975097656, "learning_rate": 1.9640000000000002e-05, "loss": 1.8188, "step": 498 }, { "epoch": 3.992, "grad_norm": 64.53687286376953, "learning_rate": 1.968e-05, "loss": 2.5104, "step": 499 }, { "epoch": 4.0, "grad_norm": 152.79161071777344, "learning_rate": 1.972e-05, "loss": 1.5812, "step": 500 }, { "epoch": 4.0, "eval_loss": 2.2298333644866943, "eval_map": 0.063, "eval_map_50": 0.1131, "eval_map_75": 0.0574, "eval_map_Coverall": 0.2621, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.023, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0296, "eval_map_large": 0.0722, "eval_map_medium": 0.0151, "eval_map_small": 0.0149, "eval_mar_1": 0.097, "eval_mar_10": 0.2003, "eval_mar_100": 0.2397, "eval_mar_100_Coverall": 0.6622, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2689, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2673, "eval_mar_large": 0.2436, "eval_mar_medium": 0.1397, "eval_mar_small": 0.0563, 
"eval_runtime": 0.9098, "eval_samples_per_second": 31.877, "eval_steps_per_second": 2.198, "step": 500 }, { "epoch": 4.008, "grad_norm": 122.52188873291016, "learning_rate": 1.976e-05, "loss": 2.1988, "step": 501 }, { "epoch": 4.016, "grad_norm": 42.605892181396484, "learning_rate": 1.9800000000000004e-05, "loss": 1.6856, "step": 502 }, { "epoch": 4.024, "grad_norm": 77.1952896118164, "learning_rate": 1.984e-05, "loss": 1.5904, "step": 503 }, { "epoch": 4.032, "grad_norm": 71.19817352294922, "learning_rate": 1.9880000000000003e-05, "loss": 2.0606, "step": 504 }, { "epoch": 4.04, "grad_norm": 30.879274368286133, "learning_rate": 1.992e-05, "loss": 2.0723, "step": 505 }, { "epoch": 4.048, "grad_norm": 57.01908874511719, "learning_rate": 1.9960000000000002e-05, "loss": 2.1131, "step": 506 }, { "epoch": 4.056, "grad_norm": 40.537879943847656, "learning_rate": 2e-05, "loss": 2.3611, "step": 507 }, { "epoch": 4.064, "grad_norm": 129.82350158691406, "learning_rate": 2.004e-05, "loss": 2.3008, "step": 508 }, { "epoch": 4.072, "grad_norm": 90.3904800415039, "learning_rate": 2.008e-05, "loss": 1.9896, "step": 509 }, { "epoch": 4.08, "grad_norm": 147.18455505371094, "learning_rate": 2.012e-05, "loss": 1.9025, "step": 510 }, { "epoch": 4.088, "grad_norm": 40.98686599731445, "learning_rate": 2.016e-05, "loss": 1.918, "step": 511 }, { "epoch": 4.096, "grad_norm": 49.02099609375, "learning_rate": 2.0200000000000003e-05, "loss": 1.9883, "step": 512 }, { "epoch": 4.104, "grad_norm": 71.60553741455078, "learning_rate": 2.024e-05, "loss": 1.7014, "step": 513 }, { "epoch": 4.112, "grad_norm": 52.81759262084961, "learning_rate": 2.0280000000000002e-05, "loss": 2.3113, "step": 514 }, { "epoch": 4.12, "grad_norm": 56.681026458740234, "learning_rate": 2.032e-05, "loss": 2.1503, "step": 515 }, { "epoch": 4.128, "grad_norm": 107.99874877929688, "learning_rate": 2.036e-05, "loss": 3.2659, "step": 516 }, { "epoch": 4.136, "grad_norm": 161.5924835205078, "learning_rate": 2.04e-05, "loss": 
2.1193, "step": 517 }, { "epoch": 4.144, "grad_norm": 157.50608825683594, "learning_rate": 2.044e-05, "loss": 2.3228, "step": 518 }, { "epoch": 4.152, "grad_norm": 33.598209381103516, "learning_rate": 2.048e-05, "loss": 2.4678, "step": 519 }, { "epoch": 4.16, "grad_norm": 32.24726104736328, "learning_rate": 2.052e-05, "loss": 2.3122, "step": 520 }, { "epoch": 4.168, "grad_norm": 74.75921630859375, "learning_rate": 2.0560000000000003e-05, "loss": 1.7296, "step": 521 }, { "epoch": 4.176, "grad_norm": 43.759429931640625, "learning_rate": 2.06e-05, "loss": 2.1127, "step": 522 }, { "epoch": 4.184, "grad_norm": 49.11468505859375, "learning_rate": 2.0640000000000002e-05, "loss": 2.3923, "step": 523 }, { "epoch": 4.192, "grad_norm": 79.34437561035156, "learning_rate": 2.0680000000000002e-05, "loss": 1.9564, "step": 524 }, { "epoch": 4.2, "grad_norm": 41.35677719116211, "learning_rate": 2.072e-05, "loss": 1.9602, "step": 525 }, { "epoch": 4.208, "grad_norm": 69.1742935180664, "learning_rate": 2.076e-05, "loss": 1.8017, "step": 526 }, { "epoch": 4.216, "grad_norm": 52.457611083984375, "learning_rate": 2.08e-05, "loss": 2.3068, "step": 527 }, { "epoch": 4.224, "grad_norm": 67.80436706542969, "learning_rate": 2.084e-05, "loss": 1.7946, "step": 528 }, { "epoch": 4.232, "grad_norm": 56.241050720214844, "learning_rate": 2.0880000000000003e-05, "loss": 1.989, "step": 529 }, { "epoch": 4.24, "grad_norm": 183.3625946044922, "learning_rate": 2.092e-05, "loss": 2.0969, "step": 530 }, { "epoch": 4.248, "grad_norm": 72.72171783447266, "learning_rate": 2.0960000000000003e-05, "loss": 1.9435, "step": 531 }, { "epoch": 4.256, "grad_norm": 32.393306732177734, "learning_rate": 2.1e-05, "loss": 1.6129, "step": 532 }, { "epoch": 4.264, "grad_norm": 68.66413116455078, "learning_rate": 2.1040000000000002e-05, "loss": 1.6648, "step": 533 }, { "epoch": 4.272, "grad_norm": 44.05899429321289, "learning_rate": 2.1079999999999998e-05, "loss": 2.0271, "step": 534 }, { "epoch": 4.28, "grad_norm": 
76.01612091064453, "learning_rate": 2.112e-05, "loss": 2.465, "step": 535 }, { "epoch": 4.288, "grad_norm": 73.54623413085938, "learning_rate": 2.116e-05, "loss": 2.0925, "step": 536 }, { "epoch": 4.296, "grad_norm": 70.5202407836914, "learning_rate": 2.12e-05, "loss": 1.8116, "step": 537 }, { "epoch": 4.304, "grad_norm": 88.09564971923828, "learning_rate": 2.124e-05, "loss": 1.9582, "step": 538 }, { "epoch": 4.312, "grad_norm": 31.228931427001953, "learning_rate": 2.128e-05, "loss": 2.4008, "step": 539 }, { "epoch": 4.32, "grad_norm": 42.73883056640625, "learning_rate": 2.1320000000000003e-05, "loss": 2.0116, "step": 540 }, { "epoch": 4.328, "grad_norm": 37.437217712402344, "learning_rate": 2.1360000000000002e-05, "loss": 1.6956, "step": 541 }, { "epoch": 4.336, "grad_norm": 122.06768035888672, "learning_rate": 2.1400000000000002e-05, "loss": 1.6441, "step": 542 }, { "epoch": 4.344, "grad_norm": 97.06209564208984, "learning_rate": 2.144e-05, "loss": 2.4391, "step": 543 }, { "epoch": 4.352, "grad_norm": 41.553199768066406, "learning_rate": 2.148e-05, "loss": 2.5871, "step": 544 }, { "epoch": 4.36, "grad_norm": 35.722572326660156, "learning_rate": 2.152e-05, "loss": 2.1453, "step": 545 }, { "epoch": 4.368, "grad_norm": 106.23880004882812, "learning_rate": 2.1560000000000004e-05, "loss": 1.7935, "step": 546 }, { "epoch": 4.376, "grad_norm": 162.36105346679688, "learning_rate": 2.16e-05, "loss": 1.6679, "step": 547 }, { "epoch": 4.384, "grad_norm": 100.69235229492188, "learning_rate": 2.1640000000000003e-05, "loss": 1.7078, "step": 548 }, { "epoch": 4.392, "grad_norm": 47.221092224121094, "learning_rate": 2.168e-05, "loss": 1.853, "step": 549 }, { "epoch": 4.4, "grad_norm": 59.55783462524414, "learning_rate": 2.1720000000000002e-05, "loss": 2.253, "step": 550 }, { "epoch": 4.408, "grad_norm": 33.20173645019531, "learning_rate": 2.176e-05, "loss": 2.1743, "step": 551 }, { "epoch": 4.416, "grad_norm": 81.70272827148438, "learning_rate": 2.18e-05, "loss": 2.2982, "step": 
552 }, { "epoch": 4.424, "grad_norm": 32.54505920410156, "learning_rate": 2.184e-05, "loss": 2.0764, "step": 553 }, { "epoch": 4.432, "grad_norm": 90.3313217163086, "learning_rate": 2.188e-05, "loss": 2.0383, "step": 554 }, { "epoch": 4.44, "grad_norm": 34.230838775634766, "learning_rate": 2.192e-05, "loss": 1.6756, "step": 555 }, { "epoch": 4.448, "grad_norm": 79.38548278808594, "learning_rate": 2.196e-05, "loss": 2.0113, "step": 556 }, { "epoch": 4.456, "grad_norm": 153.92835998535156, "learning_rate": 2.2000000000000003e-05, "loss": 1.6179, "step": 557 }, { "epoch": 4.464, "grad_norm": 161.6710662841797, "learning_rate": 2.2040000000000002e-05, "loss": 1.9861, "step": 558 }, { "epoch": 4.4719999999999995, "grad_norm": 172.0291748046875, "learning_rate": 2.2080000000000002e-05, "loss": 2.2648, "step": 559 }, { "epoch": 4.48, "grad_norm": 93.68144226074219, "learning_rate": 2.212e-05, "loss": 2.2037, "step": 560 }, { "epoch": 4.4879999999999995, "grad_norm": 177.24346923828125, "learning_rate": 2.216e-05, "loss": 3.4945, "step": 561 }, { "epoch": 4.496, "grad_norm": 69.9593734741211, "learning_rate": 2.22e-05, "loss": 1.5427, "step": 562 }, { "epoch": 4.504, "grad_norm": 88.00253295898438, "learning_rate": 2.224e-05, "loss": 1.6897, "step": 563 }, { "epoch": 4.5120000000000005, "grad_norm": 59.63442611694336, "learning_rate": 2.228e-05, "loss": 2.6148, "step": 564 }, { "epoch": 4.52, "grad_norm": 38.4323844909668, "learning_rate": 2.2320000000000003e-05, "loss": 2.0097, "step": 565 }, { "epoch": 4.5280000000000005, "grad_norm": 162.82106018066406, "learning_rate": 2.236e-05, "loss": 2.0267, "step": 566 }, { "epoch": 4.536, "grad_norm": 88.49553680419922, "learning_rate": 2.2400000000000002e-05, "loss": 2.3852, "step": 567 }, { "epoch": 4.5440000000000005, "grad_norm": 52.71684265136719, "learning_rate": 2.244e-05, "loss": 2.513, "step": 568 }, { "epoch": 4.552, "grad_norm": 36.1796760559082, "learning_rate": 2.248e-05, "loss": 2.0156, "step": 569 }, { "epoch": 
4.5600000000000005, "grad_norm": 68.68534088134766, "learning_rate": 2.252e-05, "loss": 2.1818, "step": 570 }, { "epoch": 4.568, "grad_norm": 56.805755615234375, "learning_rate": 2.256e-05, "loss": 2.0518, "step": 571 }, { "epoch": 4.576, "grad_norm": 56.13420104980469, "learning_rate": 2.26e-05, "loss": 1.8495, "step": 572 }, { "epoch": 4.584, "grad_norm": 68.69255065917969, "learning_rate": 2.264e-05, "loss": 1.9787, "step": 573 }, { "epoch": 4.592, "grad_norm": 52.6536979675293, "learning_rate": 2.268e-05, "loss": 2.0631, "step": 574 }, { "epoch": 4.6, "grad_norm": 47.13468551635742, "learning_rate": 2.2720000000000003e-05, "loss": 1.3832, "step": 575 }, { "epoch": 4.608, "grad_norm": 122.69713592529297, "learning_rate": 2.2760000000000002e-05, "loss": 1.8732, "step": 576 }, { "epoch": 4.616, "grad_norm": 52.283992767333984, "learning_rate": 2.2800000000000002e-05, "loss": 1.7984, "step": 577 }, { "epoch": 4.624, "grad_norm": 47.13487243652344, "learning_rate": 2.284e-05, "loss": 2.0432, "step": 578 }, { "epoch": 4.632, "grad_norm": 72.19902038574219, "learning_rate": 2.288e-05, "loss": 1.8482, "step": 579 }, { "epoch": 4.64, "grad_norm": 42.430946350097656, "learning_rate": 2.292e-05, "loss": 2.086, "step": 580 }, { "epoch": 4.648, "grad_norm": 196.2344207763672, "learning_rate": 2.296e-05, "loss": 1.9903, "step": 581 }, { "epoch": 4.656, "grad_norm": 30.643268585205078, "learning_rate": 2.3000000000000003e-05, "loss": 2.3923, "step": 582 }, { "epoch": 4.664, "grad_norm": 951.78662109375, "learning_rate": 2.304e-05, "loss": 1.9143, "step": 583 }, { "epoch": 4.672, "grad_norm": 77.1088638305664, "learning_rate": 2.3080000000000003e-05, "loss": 1.8979, "step": 584 }, { "epoch": 4.68, "grad_norm": 49.17723083496094, "learning_rate": 2.312e-05, "loss": 2.0624, "step": 585 }, { "epoch": 4.688, "grad_norm": 37.36810302734375, "learning_rate": 2.3160000000000002e-05, "loss": 1.572, "step": 586 }, { "epoch": 4.696, "grad_norm": 41.09722900390625, "learning_rate": 
2.32e-05, "loss": 2.2123, "step": 587 }, { "epoch": 4.704, "grad_norm": 548.2877807617188, "learning_rate": 2.324e-05, "loss": 2.1686, "step": 588 }, { "epoch": 4.712, "grad_norm": 42.722312927246094, "learning_rate": 2.328e-05, "loss": 1.7623, "step": 589 }, { "epoch": 4.72, "grad_norm": 43.173038482666016, "learning_rate": 2.332e-05, "loss": 2.3983, "step": 590 }, { "epoch": 4.728, "grad_norm": 58.2569694519043, "learning_rate": 2.336e-05, "loss": 1.6757, "step": 591 }, { "epoch": 4.736, "grad_norm": 45.85189437866211, "learning_rate": 2.3400000000000003e-05, "loss": 1.8671, "step": 592 }, { "epoch": 4.744, "grad_norm": 30.339353561401367, "learning_rate": 2.344e-05, "loss": 1.6351, "step": 593 }, { "epoch": 4.752, "grad_norm": 35.74654769897461, "learning_rate": 2.3480000000000002e-05, "loss": 1.8109, "step": 594 }, { "epoch": 4.76, "grad_norm": 52.55023956298828, "learning_rate": 2.3520000000000002e-05, "loss": 2.6679, "step": 595 }, { "epoch": 4.768, "grad_norm": 34.61554718017578, "learning_rate": 2.356e-05, "loss": 2.2438, "step": 596 }, { "epoch": 4.776, "grad_norm": 39.38899612426758, "learning_rate": 2.36e-05, "loss": 1.8263, "step": 597 }, { "epoch": 4.784, "grad_norm": 33.75263595581055, "learning_rate": 2.364e-05, "loss": 1.4909, "step": 598 }, { "epoch": 4.792, "grad_norm": 56.641963958740234, "learning_rate": 2.3680000000000004e-05, "loss": 1.68, "step": 599 }, { "epoch": 4.8, "grad_norm": 66.91716766357422, "learning_rate": 2.372e-05, "loss": 1.8447, "step": 600 }, { "epoch": 4.808, "grad_norm": 37.94300079345703, "learning_rate": 2.3760000000000003e-05, "loss": 2.5496, "step": 601 }, { "epoch": 4.816, "grad_norm": 51.485023498535156, "learning_rate": 2.38e-05, "loss": 1.6385, "step": 602 }, { "epoch": 4.824, "grad_norm": 32.5924186706543, "learning_rate": 2.3840000000000002e-05, "loss": 1.9473, "step": 603 }, { "epoch": 4.832, "grad_norm": 106.7143783569336, "learning_rate": 2.3880000000000002e-05, "loss": 2.038, "step": 604 }, { "epoch": 4.84, 
"grad_norm": 58.73044967651367, "learning_rate": 2.392e-05, "loss": 1.8902, "step": 605 }, { "epoch": 4.848, "grad_norm": 39.470123291015625, "learning_rate": 2.396e-05, "loss": 2.0107, "step": 606 }, { "epoch": 4.856, "grad_norm": 60.20596694946289, "learning_rate": 2.4e-05, "loss": 1.8419, "step": 607 }, { "epoch": 4.864, "grad_norm": 49.830867767333984, "learning_rate": 2.404e-05, "loss": 1.783, "step": 608 }, { "epoch": 4.872, "grad_norm": 42.54050827026367, "learning_rate": 2.408e-05, "loss": 2.4057, "step": 609 }, { "epoch": 4.88, "grad_norm": 63.57254409790039, "learning_rate": 2.412e-05, "loss": 2.0286, "step": 610 }, { "epoch": 4.888, "grad_norm": 44.553550720214844, "learning_rate": 2.4160000000000002e-05, "loss": 1.7461, "step": 611 }, { "epoch": 4.896, "grad_norm": 298.9317321777344, "learning_rate": 2.4200000000000002e-05, "loss": 2.188, "step": 612 }, { "epoch": 4.904, "grad_norm": 27.88445281982422, "learning_rate": 2.4240000000000002e-05, "loss": 2.3283, "step": 613 }, { "epoch": 4.912, "grad_norm": 61.766971588134766, "learning_rate": 2.428e-05, "loss": 1.9411, "step": 614 }, { "epoch": 4.92, "grad_norm": 42.87358856201172, "learning_rate": 2.432e-05, "loss": 1.427, "step": 615 }, { "epoch": 4.928, "grad_norm": 50.16923522949219, "learning_rate": 2.4360000000000004e-05, "loss": 2.0985, "step": 616 }, { "epoch": 4.936, "grad_norm": 50.44894790649414, "learning_rate": 2.44e-05, "loss": 1.6218, "step": 617 }, { "epoch": 4.944, "grad_norm": 244.07835388183594, "learning_rate": 2.4440000000000003e-05, "loss": 1.8616, "step": 618 }, { "epoch": 4.952, "grad_norm": 48.82166290283203, "learning_rate": 2.448e-05, "loss": 2.5404, "step": 619 }, { "epoch": 4.96, "grad_norm": 32.95149612426758, "learning_rate": 2.4520000000000002e-05, "loss": 2.2285, "step": 620 }, { "epoch": 4.968, "grad_norm": 74.49623107910156, "learning_rate": 2.4560000000000002e-05, "loss": 1.9709, "step": 621 }, { "epoch": 4.976, "grad_norm": 56.08809280395508, "learning_rate": 2.46e-05, 
"loss": 2.4557, "step": 622 }, { "epoch": 4.984, "grad_norm": 133.99642944335938, "learning_rate": 2.464e-05, "loss": 2.2259, "step": 623 }, { "epoch": 4.992, "grad_norm": 29.51444435119629, "learning_rate": 2.468e-05, "loss": 2.833, "step": 624 }, { "epoch": 5.0, "grad_norm": 133.69027709960938, "learning_rate": 2.472e-05, "loss": 2.0184, "step": 625 }, { "epoch": 5.0, "eval_loss": 2.112443447113037, "eval_map": 0.0583, "eval_map_50": 0.1254, "eval_map_75": 0.0494, "eval_map_Coverall": 0.2155, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0415, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0345, "eval_map_large": 0.0623, "eval_map_medium": 0.0201, "eval_map_small": 0.0206, "eval_mar_1": 0.0888, "eval_mar_10": 0.193, "eval_mar_100": 0.2253, "eval_mar_100_Coverall": 0.6178, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2508, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.2577, "eval_mar_large": 0.2383, "eval_mar_medium": 0.1338, "eval_mar_small": 0.0403, "eval_runtime": 0.9066, "eval_samples_per_second": 31.988, "eval_steps_per_second": 2.206, "step": 625 }, { "epoch": 5.008, "grad_norm": 41.4932975769043, "learning_rate": 2.476e-05, "loss": 1.4436, "step": 626 }, { "epoch": 5.016, "grad_norm": 95.30872344970703, "learning_rate": 2.48e-05, "loss": 1.7193, "step": 627 }, { "epoch": 5.024, "grad_norm": 94.24429321289062, "learning_rate": 2.4840000000000003e-05, "loss": 2.1764, "step": 628 }, { "epoch": 5.032, "grad_norm": 40.04389190673828, "learning_rate": 2.488e-05, "loss": 1.8295, "step": 629 }, { "epoch": 5.04, "grad_norm": 28.108537673950195, "learning_rate": 2.4920000000000002e-05, "loss": 1.7435, "step": 630 }, { "epoch": 5.048, "grad_norm": 56.48093795776367, "learning_rate": 2.496e-05, "loss": 1.485, "step": 631 }, { "epoch": 5.056, "grad_norm": 34.94474792480469, "learning_rate": 2.5e-05, "loss": 1.6674, "step": 632 }, { "epoch": 5.064, "grad_norm": 48.064308166503906, "learning_rate": 2.504e-05, "loss": 1.9372, "step": 633 }, { "epoch": 
5.072, "grad_norm": 33.32338333129883, "learning_rate": 2.5080000000000004e-05, "loss": 1.872, "step": 634 }, { "epoch": 5.08, "grad_norm": 51.12873458862305, "learning_rate": 2.512e-05, "loss": 2.1392, "step": 635 }, { "epoch": 5.088, "grad_norm": 98.16068267822266, "learning_rate": 2.516e-05, "loss": 1.8593, "step": 636 }, { "epoch": 5.096, "grad_norm": 75.25360870361328, "learning_rate": 2.5200000000000003e-05, "loss": 2.558, "step": 637 }, { "epoch": 5.104, "grad_norm": 73.65399169921875, "learning_rate": 2.5240000000000002e-05, "loss": 1.6001, "step": 638 }, { "epoch": 5.112, "grad_norm": 28.202295303344727, "learning_rate": 2.5280000000000005e-05, "loss": 1.8195, "step": 639 }, { "epoch": 5.12, "grad_norm": 55.199920654296875, "learning_rate": 2.5319999999999998e-05, "loss": 2.0257, "step": 640 }, { "epoch": 5.128, "grad_norm": 54.9165153503418, "learning_rate": 2.536e-05, "loss": 2.0086, "step": 641 }, { "epoch": 5.136, "grad_norm": 80.77020263671875, "learning_rate": 2.54e-05, "loss": 2.2847, "step": 642 }, { "epoch": 5.144, "grad_norm": 60.783538818359375, "learning_rate": 2.5440000000000004e-05, "loss": 2.0619, "step": 643 }, { "epoch": 5.152, "grad_norm": 38.0918083190918, "learning_rate": 2.5480000000000003e-05, "loss": 1.8563, "step": 644 }, { "epoch": 5.16, "grad_norm": 37.979557037353516, "learning_rate": 2.552e-05, "loss": 2.013, "step": 645 }, { "epoch": 5.168, "grad_norm": 68.49000549316406, "learning_rate": 2.556e-05, "loss": 1.9969, "step": 646 }, { "epoch": 5.176, "grad_norm": 29.69040870666504, "learning_rate": 2.5600000000000002e-05, "loss": 2.0199, "step": 647 }, { "epoch": 5.184, "grad_norm": 61.835086822509766, "learning_rate": 2.5640000000000002e-05, "loss": 2.1879, "step": 648 }, { "epoch": 5.192, "grad_norm": 57.39112854003906, "learning_rate": 2.5679999999999998e-05, "loss": 2.0905, "step": 649 }, { "epoch": 5.2, "grad_norm": 25.08207130432129, "learning_rate": 2.572e-05, "loss": 2.2591, "step": 650 }, { "epoch": 5.208, "grad_norm": 
86.50601959228516, "learning_rate": 2.576e-05, "loss": 2.135, "step": 651 }, { "epoch": 5.216, "grad_norm": 83.91463470458984, "learning_rate": 2.58e-05, "loss": 2.344, "step": 652 }, { "epoch": 5.224, "grad_norm": 40.418582916259766, "learning_rate": 2.5840000000000003e-05, "loss": 1.8624, "step": 653 }, { "epoch": 5.232, "grad_norm": 180.04962158203125, "learning_rate": 2.588e-05, "loss": 1.7554, "step": 654 }, { "epoch": 5.24, "grad_norm": 60.35667037963867, "learning_rate": 2.592e-05, "loss": 2.0548, "step": 655 }, { "epoch": 5.248, "grad_norm": 55.750633239746094, "learning_rate": 2.5960000000000002e-05, "loss": 1.9506, "step": 656 }, { "epoch": 5.256, "grad_norm": 124.45177459716797, "learning_rate": 2.6000000000000002e-05, "loss": 1.845, "step": 657 }, { "epoch": 5.264, "grad_norm": 38.4631233215332, "learning_rate": 2.6040000000000005e-05, "loss": 3.3266, "step": 658 }, { "epoch": 5.272, "grad_norm": 192.1752471923828, "learning_rate": 2.6079999999999998e-05, "loss": 1.8579, "step": 659 }, { "epoch": 5.28, "grad_norm": 36.981693267822266, "learning_rate": 2.612e-05, "loss": 1.558, "step": 660 }, { "epoch": 5.288, "grad_norm": 43.99863815307617, "learning_rate": 2.616e-05, "loss": 2.1547, "step": 661 }, { "epoch": 5.296, "grad_norm": 33.85749053955078, "learning_rate": 2.6200000000000003e-05, "loss": 2.6249, "step": 662 }, { "epoch": 5.304, "grad_norm": 302.2639465332031, "learning_rate": 2.6240000000000003e-05, "loss": 2.1903, "step": 663 }, { "epoch": 5.312, "grad_norm": 55.41334533691406, "learning_rate": 2.628e-05, "loss": 1.7604, "step": 664 }, { "epoch": 5.32, "grad_norm": 56.64759826660156, "learning_rate": 2.632e-05, "loss": 2.0741, "step": 665 }, { "epoch": 5.328, "grad_norm": 51.21990966796875, "learning_rate": 2.6360000000000002e-05, "loss": 1.9967, "step": 666 }, { "epoch": 5.336, "grad_norm": 49.0184326171875, "learning_rate": 2.64e-05, "loss": 1.8321, "step": 667 }, { "epoch": 5.344, "grad_norm": 35.285675048828125, "learning_rate": 
2.6440000000000004e-05, "loss": 2.0232, "step": 668 }, { "epoch": 5.352, "grad_norm": 254.8197021484375, "learning_rate": 2.648e-05, "loss": 1.8201, "step": 669 }, { "epoch": 5.36, "grad_norm": 101.88334655761719, "learning_rate": 2.652e-05, "loss": 1.967, "step": 670 }, { "epoch": 5.368, "grad_norm": 137.96861267089844, "learning_rate": 2.6560000000000003e-05, "loss": 1.7113, "step": 671 }, { "epoch": 5.376, "grad_norm": 46.4167366027832, "learning_rate": 2.6600000000000003e-05, "loss": 2.1287, "step": 672 }, { "epoch": 5.384, "grad_norm": 310.5696716308594, "learning_rate": 2.6640000000000002e-05, "loss": 2.4107, "step": 673 }, { "epoch": 5.392, "grad_norm": 104.41573333740234, "learning_rate": 2.668e-05, "loss": 2.144, "step": 674 }, { "epoch": 5.4, "grad_norm": 47.978759765625, "learning_rate": 2.672e-05, "loss": 2.2057, "step": 675 }, { "epoch": 5.408, "grad_norm": 247.9720916748047, "learning_rate": 2.676e-05, "loss": 2.2557, "step": 676 }, { "epoch": 5.416, "grad_norm": 84.36931610107422, "learning_rate": 2.6800000000000004e-05, "loss": 1.7019, "step": 677 }, { "epoch": 5.424, "grad_norm": 55.79954147338867, "learning_rate": 2.6840000000000004e-05, "loss": 2.1234, "step": 678 }, { "epoch": 5.432, "grad_norm": 139.1399688720703, "learning_rate": 2.688e-05, "loss": 2.056, "step": 679 }, { "epoch": 5.44, "grad_norm": 117.61969757080078, "learning_rate": 2.692e-05, "loss": 2.3178, "step": 680 }, { "epoch": 5.448, "grad_norm": 85.24048614501953, "learning_rate": 2.6960000000000003e-05, "loss": 1.9044, "step": 681 }, { "epoch": 5.456, "grad_norm": 114.89472198486328, "learning_rate": 2.7000000000000002e-05, "loss": 2.3357, "step": 682 }, { "epoch": 5.464, "grad_norm": 26.172550201416016, "learning_rate": 2.704e-05, "loss": 1.8996, "step": 683 }, { "epoch": 5.4719999999999995, "grad_norm": 63.550228118896484, "learning_rate": 2.7079999999999998e-05, "loss": 2.0383, "step": 684 }, { "epoch": 5.48, "grad_norm": 54.211116790771484, "learning_rate": 2.712e-05, "loss": 
2.0017, "step": 685 }, { "epoch": 5.4879999999999995, "grad_norm": 120.04081726074219, "learning_rate": 2.716e-05, "loss": 2.23, "step": 686 }, { "epoch": 5.496, "grad_norm": 80.9105224609375, "learning_rate": 2.7200000000000004e-05, "loss": 1.9324, "step": 687 }, { "epoch": 5.504, "grad_norm": 35.32963943481445, "learning_rate": 2.724e-05, "loss": 1.745, "step": 688 }, { "epoch": 5.5120000000000005, "grad_norm": 48.45976638793945, "learning_rate": 2.728e-05, "loss": 1.9774, "step": 689 }, { "epoch": 5.52, "grad_norm": 35.633724212646484, "learning_rate": 2.7320000000000003e-05, "loss": 1.6124, "step": 690 }, { "epoch": 5.5280000000000005, "grad_norm": 54.02409744262695, "learning_rate": 2.7360000000000002e-05, "loss": 1.9467, "step": 691 }, { "epoch": 5.536, "grad_norm": 38.41614532470703, "learning_rate": 2.7400000000000002e-05, "loss": 1.8354, "step": 692 }, { "epoch": 5.5440000000000005, "grad_norm": 118.07003021240234, "learning_rate": 2.7439999999999998e-05, "loss": 2.5541, "step": 693 }, { "epoch": 5.552, "grad_norm": 32.27016067504883, "learning_rate": 2.748e-05, "loss": 1.5142, "step": 694 }, { "epoch": 5.5600000000000005, "grad_norm": 68.51676940917969, "learning_rate": 2.752e-05, "loss": 2.4286, "step": 695 }, { "epoch": 5.568, "grad_norm": 119.92398071289062, "learning_rate": 2.7560000000000004e-05, "loss": 2.2825, "step": 696 }, { "epoch": 5.576, "grad_norm": 109.30038452148438, "learning_rate": 2.7600000000000003e-05, "loss": 1.9702, "step": 697 }, { "epoch": 5.584, "grad_norm": 147.62806701660156, "learning_rate": 2.764e-05, "loss": 2.0974, "step": 698 }, { "epoch": 5.592, "grad_norm": 114.25981140136719, "learning_rate": 2.768e-05, "loss": 1.8128, "step": 699 }, { "epoch": 5.6, "grad_norm": 32.814796447753906, "learning_rate": 2.7720000000000002e-05, "loss": 1.5981, "step": 700 }, { "epoch": 5.608, "grad_norm": 65.79060363769531, "learning_rate": 2.7760000000000002e-05, "loss": 2.1247, "step": 701 }, { "epoch": 5.616, "grad_norm": 60.4302864074707, 
"learning_rate": 2.7800000000000005e-05, "loss": 2.0716, "step": 702 }, { "epoch": 5.624, "grad_norm": 93.78214263916016, "learning_rate": 2.7839999999999998e-05, "loss": 1.6743, "step": 703 }, { "epoch": 5.632, "grad_norm": 41.024658203125, "learning_rate": 2.788e-05, "loss": 2.0004, "step": 704 }, { "epoch": 5.64, "grad_norm": 55.63945770263672, "learning_rate": 2.792e-05, "loss": 1.6123, "step": 705 }, { "epoch": 5.648, "grad_norm": 44.074501037597656, "learning_rate": 2.7960000000000003e-05, "loss": 2.0318, "step": 706 }, { "epoch": 5.656, "grad_norm": 40.348388671875, "learning_rate": 2.8000000000000003e-05, "loss": 2.026, "step": 707 }, { "epoch": 5.664, "grad_norm": 58.89463424682617, "learning_rate": 2.804e-05, "loss": 1.55, "step": 708 }, { "epoch": 5.672, "grad_norm": 195.98114013671875, "learning_rate": 2.8080000000000002e-05, "loss": 2.471, "step": 709 }, { "epoch": 5.68, "grad_norm": 95.2086410522461, "learning_rate": 2.8120000000000002e-05, "loss": 1.8563, "step": 710 }, { "epoch": 5.688, "grad_norm": 134.67431640625, "learning_rate": 2.816e-05, "loss": 2.795, "step": 711 }, { "epoch": 5.696, "grad_norm": 33.849796295166016, "learning_rate": 2.8199999999999998e-05, "loss": 1.8673, "step": 712 }, { "epoch": 5.704, "grad_norm": 63.14746856689453, "learning_rate": 2.824e-05, "loss": 2.2625, "step": 713 }, { "epoch": 5.712, "grad_norm": 35.919376373291016, "learning_rate": 2.828e-05, "loss": 1.5396, "step": 714 }, { "epoch": 5.72, "grad_norm": 60.20398712158203, "learning_rate": 2.8320000000000003e-05, "loss": 1.4989, "step": 715 }, { "epoch": 5.728, "grad_norm": 77.80828857421875, "learning_rate": 2.8360000000000003e-05, "loss": 1.7062, "step": 716 }, { "epoch": 5.736, "grad_norm": 48.19048309326172, "learning_rate": 2.84e-05, "loss": 2.1165, "step": 717 }, { "epoch": 5.744, "grad_norm": 103.58833312988281, "learning_rate": 2.844e-05, "loss": 2.3551, "step": 718 }, { "epoch": 5.752, "grad_norm": 68.3095932006836, "learning_rate": 2.8480000000000002e-05, 
"loss": 2.0667, "step": 719 }, { "epoch": 5.76, "grad_norm": 40.04296875, "learning_rate": 2.852e-05, "loss": 2.3367, "step": 720 }, { "epoch": 5.768, "grad_norm": 35.30561828613281, "learning_rate": 2.8560000000000004e-05, "loss": 2.573, "step": 721 }, { "epoch": 5.776, "grad_norm": 138.9660186767578, "learning_rate": 2.86e-05, "loss": 2.1548, "step": 722 }, { "epoch": 5.784, "grad_norm": 158.5631866455078, "learning_rate": 2.864e-05, "loss": 2.7085, "step": 723 }, { "epoch": 5.792, "grad_norm": 90.23500061035156, "learning_rate": 2.868e-05, "loss": 1.8824, "step": 724 }, { "epoch": 5.8, "grad_norm": 107.63327026367188, "learning_rate": 2.8720000000000003e-05, "loss": 2.119, "step": 725 }, { "epoch": 5.808, "grad_norm": 436.3948974609375, "learning_rate": 2.8760000000000002e-05, "loss": 3.8338, "step": 726 }, { "epoch": 5.816, "grad_norm": 37.84270477294922, "learning_rate": 2.88e-05, "loss": 1.7992, "step": 727 }, { "epoch": 5.824, "grad_norm": 135.66744995117188, "learning_rate": 2.8840000000000002e-05, "loss": 1.5907, "step": 728 }, { "epoch": 5.832, "grad_norm": 48.451080322265625, "learning_rate": 2.888e-05, "loss": 1.8327, "step": 729 }, { "epoch": 5.84, "grad_norm": 44.42280578613281, "learning_rate": 2.8920000000000004e-05, "loss": 1.9119, "step": 730 }, { "epoch": 5.848, "grad_norm": 35.656375885009766, "learning_rate": 2.8960000000000004e-05, "loss": 2.1175, "step": 731 }, { "epoch": 5.856, "grad_norm": 38.3781852722168, "learning_rate": 2.9e-05, "loss": 2.745, "step": 732 }, { "epoch": 5.864, "grad_norm": 22.685226440429688, "learning_rate": 2.904e-05, "loss": 1.6626, "step": 733 }, { "epoch": 5.872, "grad_norm": 43.9620246887207, "learning_rate": 2.9080000000000003e-05, "loss": 2.0361, "step": 734 }, { "epoch": 5.88, "grad_norm": 27.246204376220703, "learning_rate": 2.9120000000000002e-05, "loss": 1.7796, "step": 735 }, { "epoch": 5.888, "grad_norm": 72.58216094970703, "learning_rate": 2.9160000000000005e-05, "loss": 2.4033, "step": 736 }, { "epoch": 
5.896, "grad_norm": 98.9051284790039, "learning_rate": 2.9199999999999998e-05, "loss": 2.0448, "step": 737 }, { "epoch": 5.904, "grad_norm": 66.83812713623047, "learning_rate": 2.924e-05, "loss": 1.8016, "step": 738 }, { "epoch": 5.912, "grad_norm": 55.51264572143555, "learning_rate": 2.928e-05, "loss": 2.0093, "step": 739 }, { "epoch": 5.92, "grad_norm": 37.750709533691406, "learning_rate": 2.9320000000000004e-05, "loss": 2.0864, "step": 740 }, { "epoch": 5.928, "grad_norm": 121.66107177734375, "learning_rate": 2.9360000000000003e-05, "loss": 2.6759, "step": 741 }, { "epoch": 5.936, "grad_norm": 143.7084503173828, "learning_rate": 2.94e-05, "loss": 1.6398, "step": 742 }, { "epoch": 5.944, "grad_norm": 76.93590545654297, "learning_rate": 2.944e-05, "loss": 2.2423, "step": 743 }, { "epoch": 5.952, "grad_norm": 51.447914123535156, "learning_rate": 2.9480000000000002e-05, "loss": 2.4823, "step": 744 }, { "epoch": 5.96, "grad_norm": 26.23589515686035, "learning_rate": 2.9520000000000002e-05, "loss": 1.6112, "step": 745 }, { "epoch": 5.968, "grad_norm": 53.20204544067383, "learning_rate": 2.9559999999999998e-05, "loss": 1.8521, "step": 746 }, { "epoch": 5.976, "grad_norm": 79.80268859863281, "learning_rate": 2.96e-05, "loss": 1.952, "step": 747 }, { "epoch": 5.984, "grad_norm": 45.57666778564453, "learning_rate": 2.964e-05, "loss": 2.1955, "step": 748 }, { "epoch": 5.992, "grad_norm": 24.594758987426758, "learning_rate": 2.9680000000000004e-05, "loss": 2.0847, "step": 749 }, { "epoch": 6.0, "grad_norm": 28.298480987548828, "learning_rate": 2.9720000000000003e-05, "loss": 1.65, "step": 750 }, { "epoch": 6.0, "eval_loss": 2.0246496200561523, "eval_map": 0.0673, "eval_map_50": 0.1329, "eval_map_75": 0.0565, "eval_map_Coverall": 0.2507, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0352, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0507, "eval_map_large": 0.0886, "eval_map_medium": 0.0212, "eval_map_small": 0.0229, "eval_mar_1": 0.0948, "eval_mar_10": 0.208, 
"eval_mar_100": 0.2465, "eval_mar_100_Coverall": 0.6889, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2361, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3077, "eval_mar_large": 0.2202, "eval_mar_medium": 0.1493, "eval_mar_small": 0.0419, "eval_runtime": 0.9007, "eval_samples_per_second": 32.197, "eval_steps_per_second": 2.22, "step": 750 }, { "epoch": 6.008, "grad_norm": 46.73942184448242, "learning_rate": 2.976e-05, "loss": 1.6208, "step": 751 }, { "epoch": 6.016, "grad_norm": 31.264326095581055, "learning_rate": 2.98e-05, "loss": 1.5463, "step": 752 }, { "epoch": 6.024, "grad_norm": 27.944744110107422, "learning_rate": 2.9840000000000002e-05, "loss": 1.6677, "step": 753 }, { "epoch": 6.032, "grad_norm": 39.93405532836914, "learning_rate": 2.9880000000000002e-05, "loss": 1.9448, "step": 754 }, { "epoch": 6.04, "grad_norm": 25.342052459716797, "learning_rate": 2.9920000000000005e-05, "loss": 1.9317, "step": 755 }, { "epoch": 6.048, "grad_norm": 24.361291885375977, "learning_rate": 2.9959999999999998e-05, "loss": 2.2605, "step": 756 }, { "epoch": 6.056, "grad_norm": 25.899925231933594, "learning_rate": 3e-05, "loss": 1.7122, "step": 757 }, { "epoch": 6.064, "grad_norm": 54.434173583984375, "learning_rate": 3.004e-05, "loss": 2.3595, "step": 758 }, { "epoch": 6.072, "grad_norm": 31.96658706665039, "learning_rate": 3.0080000000000003e-05, "loss": 1.5675, "step": 759 }, { "epoch": 6.08, "grad_norm": 30.02750587463379, "learning_rate": 3.0120000000000003e-05, "loss": 1.4593, "step": 760 }, { "epoch": 6.088, "grad_norm": 28.717222213745117, "learning_rate": 3.016e-05, "loss": 2.2191, "step": 761 }, { "epoch": 6.096, "grad_norm": 24.86085319519043, "learning_rate": 3.02e-05, "loss": 1.8593, "step": 762 }, { "epoch": 6.104, "grad_norm": 180.4688720703125, "learning_rate": 3.0240000000000002e-05, "loss": 1.8013, "step": 763 }, { "epoch": 6.112, "grad_norm": 46.24912643432617, "learning_rate": 3.028e-05, "loss": 1.7233, "step": 764 }, { "epoch": 6.12, 
"grad_norm": 33.37835693359375, "learning_rate": 3.0320000000000004e-05, "loss": 2.0961, "step": 765 }, { "epoch": 6.128, "grad_norm": 37.91519546508789, "learning_rate": 3.036e-05, "loss": 1.7027, "step": 766 }, { "epoch": 6.136, "grad_norm": 58.14091110229492, "learning_rate": 3.04e-05, "loss": 1.4606, "step": 767 }, { "epoch": 6.144, "grad_norm": 29.66106605529785, "learning_rate": 3.0440000000000003e-05, "loss": 1.474, "step": 768 }, { "epoch": 6.152, "grad_norm": 93.17139434814453, "learning_rate": 3.0480000000000003e-05, "loss": 1.6298, "step": 769 }, { "epoch": 6.16, "grad_norm": 67.9444351196289, "learning_rate": 3.0520000000000006e-05, "loss": 1.6934, "step": 770 }, { "epoch": 6.168, "grad_norm": 65.94824981689453, "learning_rate": 3.056e-05, "loss": 2.2569, "step": 771 }, { "epoch": 6.176, "grad_norm": 54.10918045043945, "learning_rate": 3.06e-05, "loss": 2.1771, "step": 772 }, { "epoch": 6.184, "grad_norm": 58.85057830810547, "learning_rate": 3.0640000000000005e-05, "loss": 1.6699, "step": 773 }, { "epoch": 6.192, "grad_norm": 45.9112434387207, "learning_rate": 3.0680000000000004e-05, "loss": 1.7153, "step": 774 }, { "epoch": 6.2, "grad_norm": 67.32225036621094, "learning_rate": 3.072e-05, "loss": 1.8382, "step": 775 }, { "epoch": 6.208, "grad_norm": 51.03566360473633, "learning_rate": 3.076e-05, "loss": 1.5258, "step": 776 }, { "epoch": 6.216, "grad_norm": 36.36784362792969, "learning_rate": 3.08e-05, "loss": 1.5498, "step": 777 }, { "epoch": 6.224, "grad_norm": 32.00996780395508, "learning_rate": 3.084e-05, "loss": 1.7672, "step": 778 }, { "epoch": 6.232, "grad_norm": 200.28085327148438, "learning_rate": 3.088e-05, "loss": 1.7934, "step": 779 }, { "epoch": 6.24, "grad_norm": 37.71492004394531, "learning_rate": 3.092e-05, "loss": 2.1204, "step": 780 }, { "epoch": 6.248, "grad_norm": 28.35658836364746, "learning_rate": 3.096e-05, "loss": 2.4307, "step": 781 }, { "epoch": 6.256, "grad_norm": 35.0727424621582, "learning_rate": 3.1e-05, "loss": 1.5359, 
"step": 782 }, { "epoch": 6.264, "grad_norm": 45.412681579589844, "learning_rate": 3.104e-05, "loss": 1.8692, "step": 783 }, { "epoch": 6.272, "grad_norm": 37.6780891418457, "learning_rate": 3.108e-05, "loss": 2.4197, "step": 784 }, { "epoch": 6.28, "grad_norm": 33.22783279418945, "learning_rate": 3.112e-05, "loss": 1.4404, "step": 785 }, { "epoch": 6.288, "grad_norm": 33.62567901611328, "learning_rate": 3.116e-05, "loss": 2.2689, "step": 786 }, { "epoch": 6.296, "grad_norm": 41.782222747802734, "learning_rate": 3.12e-05, "loss": 1.8208, "step": 787 }, { "epoch": 6.304, "grad_norm": 26.035003662109375, "learning_rate": 3.1240000000000006e-05, "loss": 1.7116, "step": 788 }, { "epoch": 6.312, "grad_norm": 31.202117919921875, "learning_rate": 3.1280000000000005e-05, "loss": 1.8688, "step": 789 }, { "epoch": 6.32, "grad_norm": 32.84320068359375, "learning_rate": 3.132e-05, "loss": 1.9407, "step": 790 }, { "epoch": 6.328, "grad_norm": 35.985321044921875, "learning_rate": 3.136e-05, "loss": 1.5577, "step": 791 }, { "epoch": 6.336, "grad_norm": 27.890438079833984, "learning_rate": 3.1400000000000004e-05, "loss": 2.0472, "step": 792 }, { "epoch": 6.344, "grad_norm": 22.788410186767578, "learning_rate": 3.1440000000000004e-05, "loss": 2.0326, "step": 793 }, { "epoch": 6.352, "grad_norm": 34.39364242553711, "learning_rate": 3.1480000000000004e-05, "loss": 1.5118, "step": 794 }, { "epoch": 6.36, "grad_norm": 34.005165100097656, "learning_rate": 3.1519999999999996e-05, "loss": 2.0048, "step": 795 }, { "epoch": 6.368, "grad_norm": 34.256805419921875, "learning_rate": 3.156e-05, "loss": 2.2963, "step": 796 }, { "epoch": 6.376, "grad_norm": 23.829898834228516, "learning_rate": 3.16e-05, "loss": 1.5001, "step": 797 }, { "epoch": 6.384, "grad_norm": 66.85436248779297, "learning_rate": 3.164e-05, "loss": 1.3648, "step": 798 }, { "epoch": 6.392, "grad_norm": 32.89874267578125, "learning_rate": 3.168e-05, "loss": 1.9509, "step": 799 }, { "epoch": 6.4, "grad_norm": 29.824520111083984, 
"learning_rate": 3.172e-05, "loss": 1.7049, "step": 800 }, { "epoch": 6.408, "grad_norm": 43.43797302246094, "learning_rate": 3.176e-05, "loss": 2.2462, "step": 801 }, { "epoch": 6.416, "grad_norm": 70.51484680175781, "learning_rate": 3.18e-05, "loss": 1.6825, "step": 802 }, { "epoch": 6.424, "grad_norm": 46.79685592651367, "learning_rate": 3.184e-05, "loss": 1.6041, "step": 803 }, { "epoch": 6.432, "grad_norm": 48.9615478515625, "learning_rate": 3.188e-05, "loss": 1.9005, "step": 804 }, { "epoch": 6.44, "grad_norm": 59.91651153564453, "learning_rate": 3.192e-05, "loss": 1.551, "step": 805 }, { "epoch": 6.448, "grad_norm": 51.63095474243164, "learning_rate": 3.196e-05, "loss": 1.5547, "step": 806 }, { "epoch": 6.456, "grad_norm": 43.094051361083984, "learning_rate": 3.2000000000000005e-05, "loss": 1.6335, "step": 807 }, { "epoch": 6.464, "grad_norm": 45.63566970825195, "learning_rate": 3.2040000000000005e-05, "loss": 1.5983, "step": 808 }, { "epoch": 6.4719999999999995, "grad_norm": 31.98708724975586, "learning_rate": 3.208e-05, "loss": 2.1021, "step": 809 }, { "epoch": 6.48, "grad_norm": 40.88050842285156, "learning_rate": 3.212e-05, "loss": 2.3173, "step": 810 }, { "epoch": 6.4879999999999995, "grad_norm": 42.6568717956543, "learning_rate": 3.2160000000000004e-05, "loss": 1.9427, "step": 811 }, { "epoch": 6.496, "grad_norm": 53.29558181762695, "learning_rate": 3.2200000000000003e-05, "loss": 1.6715, "step": 812 }, { "epoch": 6.504, "grad_norm": 26.407316207885742, "learning_rate": 3.224e-05, "loss": 1.6936, "step": 813 }, { "epoch": 6.5120000000000005, "grad_norm": 49.12617874145508, "learning_rate": 3.2279999999999996e-05, "loss": 2.2311, "step": 814 }, { "epoch": 6.52, "grad_norm": 27.32025146484375, "learning_rate": 3.232e-05, "loss": 1.8746, "step": 815 }, { "epoch": 6.5280000000000005, "grad_norm": 57.2545280456543, "learning_rate": 3.236e-05, "loss": 2.6922, "step": 816 }, { "epoch": 6.536, "grad_norm": 59.3455810546875, "learning_rate": 3.24e-05, "loss": 
2.0046, "step": 817 }, { "epoch": 6.5440000000000005, "grad_norm": 36.54896545410156, "learning_rate": 3.244e-05, "loss": 1.5743, "step": 818 }, { "epoch": 6.552, "grad_norm": 21.185670852661133, "learning_rate": 3.248e-05, "loss": 1.296, "step": 819 }, { "epoch": 6.5600000000000005, "grad_norm": 31.023883819580078, "learning_rate": 3.252e-05, "loss": 2.1788, "step": 820 }, { "epoch": 6.568, "grad_norm": 59.43701934814453, "learning_rate": 3.256e-05, "loss": 1.8333, "step": 821 }, { "epoch": 6.576, "grad_norm": 34.441139221191406, "learning_rate": 3.26e-05, "loss": 1.5754, "step": 822 }, { "epoch": 6.584, "grad_norm": 71.94945526123047, "learning_rate": 3.2640000000000006e-05, "loss": 1.8316, "step": 823 }, { "epoch": 6.592, "grad_norm": 68.10340118408203, "learning_rate": 3.268e-05, "loss": 1.7336, "step": 824 }, { "epoch": 6.6, "grad_norm": 48.00246047973633, "learning_rate": 3.272e-05, "loss": 2.1397, "step": 825 }, { "epoch": 6.608, "grad_norm": 32.999267578125, "learning_rate": 3.2760000000000005e-05, "loss": 2.3585, "step": 826 }, { "epoch": 6.616, "grad_norm": 157.42068481445312, "learning_rate": 3.2800000000000004e-05, "loss": 1.4062, "step": 827 }, { "epoch": 6.624, "grad_norm": 188.78790283203125, "learning_rate": 3.2840000000000004e-05, "loss": 1.9857, "step": 828 }, { "epoch": 6.632, "grad_norm": 38.2253532409668, "learning_rate": 3.288e-05, "loss": 1.6773, "step": 829 }, { "epoch": 6.64, "grad_norm": 45.95706558227539, "learning_rate": 3.292e-05, "loss": 2.1078, "step": 830 }, { "epoch": 6.648, "grad_norm": 84.08907318115234, "learning_rate": 3.296e-05, "loss": 1.9729, "step": 831 }, { "epoch": 6.656, "grad_norm": 38.457069396972656, "learning_rate": 3.3e-05, "loss": 1.8553, "step": 832 }, { "epoch": 6.664, "grad_norm": 62.56658935546875, "learning_rate": 3.304e-05, "loss": 1.3685, "step": 833 }, { "epoch": 6.672, "grad_norm": 179.36874389648438, "learning_rate": 3.308e-05, "loss": 1.8533, "step": 834 }, { "epoch": 6.68, "grad_norm": 62.61653137207031, 
"learning_rate": 3.312e-05, "loss": 1.6822, "step": 835 }, { "epoch": 6.688, "grad_norm": 74.63492584228516, "learning_rate": 3.316e-05, "loss": 1.7864, "step": 836 }, { "epoch": 6.696, "grad_norm": 24.899677276611328, "learning_rate": 3.32e-05, "loss": 1.5332, "step": 837 }, { "epoch": 6.704, "grad_norm": 33.747047424316406, "learning_rate": 3.324e-05, "loss": 1.4587, "step": 838 }, { "epoch": 6.712, "grad_norm": 185.30679321289062, "learning_rate": 3.328e-05, "loss": 1.6167, "step": 839 }, { "epoch": 6.72, "grad_norm": 44.371768951416016, "learning_rate": 3.332e-05, "loss": 1.7213, "step": 840 }, { "epoch": 6.728, "grad_norm": 27.03620719909668, "learning_rate": 3.336e-05, "loss": 1.774, "step": 841 }, { "epoch": 6.736, "grad_norm": 39.57730484008789, "learning_rate": 3.3400000000000005e-05, "loss": 1.5283, "step": 842 }, { "epoch": 6.744, "grad_norm": 57.311309814453125, "learning_rate": 3.344e-05, "loss": 2.1222, "step": 843 }, { "epoch": 6.752, "grad_norm": 45.1869010925293, "learning_rate": 3.348e-05, "loss": 1.7307, "step": 844 }, { "epoch": 6.76, "grad_norm": 27.829334259033203, "learning_rate": 3.3520000000000004e-05, "loss": 1.6039, "step": 845 }, { "epoch": 6.768, "grad_norm": 63.797996520996094, "learning_rate": 3.3560000000000004e-05, "loss": 1.9563, "step": 846 }, { "epoch": 6.776, "grad_norm": 85.63345336914062, "learning_rate": 3.3600000000000004e-05, "loss": 2.4229, "step": 847 }, { "epoch": 6.784, "grad_norm": 173.77847290039062, "learning_rate": 3.3639999999999996e-05, "loss": 1.9077, "step": 848 }, { "epoch": 6.792, "grad_norm": 44.36033630371094, "learning_rate": 3.368e-05, "loss": 2.5226, "step": 849 }, { "epoch": 6.8, "grad_norm": 52.241390228271484, "learning_rate": 3.372e-05, "loss": 1.9035, "step": 850 }, { "epoch": 6.808, "grad_norm": 85.24805450439453, "learning_rate": 3.376e-05, "loss": 1.6212, "step": 851 }, { "epoch": 6.816, "grad_norm": 53.07511901855469, "learning_rate": 3.38e-05, "loss": 1.7276, "step": 852 }, { "epoch": 6.824, 
"grad_norm": 40.56788635253906, "learning_rate": 3.384e-05, "loss": 1.7195, "step": 853 }, { "epoch": 6.832, "grad_norm": 54.01022720336914, "learning_rate": 3.388e-05, "loss": 2.2612, "step": 854 }, { "epoch": 6.84, "grad_norm": 49.6237678527832, "learning_rate": 3.392e-05, "loss": 1.9904, "step": 855 }, { "epoch": 6.848, "grad_norm": 96.44950866699219, "learning_rate": 3.396e-05, "loss": 1.8439, "step": 856 }, { "epoch": 6.856, "grad_norm": 47.09771728515625, "learning_rate": 3.4000000000000007e-05, "loss": 1.798, "step": 857 }, { "epoch": 6.864, "grad_norm": 24.915817260742188, "learning_rate": 3.404e-05, "loss": 1.3893, "step": 858 }, { "epoch": 6.872, "grad_norm": 94.58625793457031, "learning_rate": 3.408e-05, "loss": 2.6574, "step": 859 }, { "epoch": 6.88, "grad_norm": 28.334306716918945, "learning_rate": 3.412e-05, "loss": 1.8105, "step": 860 }, { "epoch": 6.888, "grad_norm": 44.45904541015625, "learning_rate": 3.4160000000000005e-05, "loss": 1.9174, "step": 861 }, { "epoch": 6.896, "grad_norm": 73.86759185791016, "learning_rate": 3.4200000000000005e-05, "loss": 1.8844, "step": 862 }, { "epoch": 6.904, "grad_norm": 22.482481002807617, "learning_rate": 3.424e-05, "loss": 2.2396, "step": 863 }, { "epoch": 6.912, "grad_norm": 311.46966552734375, "learning_rate": 3.4280000000000004e-05, "loss": 3.0571, "step": 864 }, { "epoch": 6.92, "grad_norm": 82.01093292236328, "learning_rate": 3.4320000000000003e-05, "loss": 1.762, "step": 865 }, { "epoch": 6.928, "grad_norm": 65.88276672363281, "learning_rate": 3.436e-05, "loss": 1.763, "step": 866 }, { "epoch": 6.936, "grad_norm": 30.81038475036621, "learning_rate": 3.4399999999999996e-05, "loss": 2.2544, "step": 867 }, { "epoch": 6.944, "grad_norm": 47.44786071777344, "learning_rate": 3.444e-05, "loss": 1.7654, "step": 868 }, { "epoch": 6.952, "grad_norm": 29.534317016601562, "learning_rate": 3.448e-05, "loss": 1.8858, "step": 869 }, { "epoch": 6.96, "grad_norm": 81.3572006225586, "learning_rate": 3.452e-05, "loss": 
1.5552, "step": 870 }, { "epoch": 6.968, "grad_norm": 31.271419525146484, "learning_rate": 3.456e-05, "loss": 3.0414, "step": 871 }, { "epoch": 6.976, "grad_norm": 21.186683654785156, "learning_rate": 3.46e-05, "loss": 1.7304, "step": 872 }, { "epoch": 6.984, "grad_norm": 35.44110107421875, "learning_rate": 3.464e-05, "loss": 1.7542, "step": 873 }, { "epoch": 6.992, "grad_norm": 46.48596954345703, "learning_rate": 3.468e-05, "loss": 1.8143, "step": 874 }, { "epoch": 7.0, "grad_norm": 70.27835083007812, "learning_rate": 3.472e-05, "loss": 1.5046, "step": 875 }, { "epoch": 7.0, "eval_loss": 1.8635473251342773, "eval_map": 0.0933, "eval_map_50": 0.2033, "eval_map_75": 0.0817, "eval_map_Coverall": 0.3557, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0472, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0638, "eval_map_large": 0.0881, "eval_map_medium": 0.03, "eval_map_small": 0.0395, "eval_mar_1": 0.1009, "eval_mar_10": 0.2178, "eval_mar_100": 0.2546, "eval_mar_100_Coverall": 0.6533, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2525, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3673, "eval_mar_large": 0.2127, "eval_mar_medium": 0.1605, "eval_mar_small": 0.0814, "eval_runtime": 0.8976, "eval_samples_per_second": 32.307, "eval_steps_per_second": 2.228, "step": 875 }, { "epoch": 7.008, "grad_norm": 41.4987907409668, "learning_rate": 3.4760000000000006e-05, "loss": 1.7619, "step": 876 }, { "epoch": 7.016, "grad_norm": 62.93650817871094, "learning_rate": 3.48e-05, "loss": 1.9334, "step": 877 }, { "epoch": 7.024, "grad_norm": 63.157596588134766, "learning_rate": 3.484e-05, "loss": 1.6682, "step": 878 }, { "epoch": 7.032, "grad_norm": 62.77547073364258, "learning_rate": 3.4880000000000005e-05, "loss": 1.6214, "step": 879 }, { "epoch": 7.04, "grad_norm": 41.23252487182617, "learning_rate": 3.4920000000000004e-05, "loss": 1.8375, "step": 880 }, { "epoch": 7.048, "grad_norm": 89.505126953125, "learning_rate": 3.4960000000000004e-05, "loss": 1.7658, "step": 
881 }, { "epoch": 7.056, "grad_norm": 21.998075485229492, "learning_rate": 3.5e-05, "loss": 1.6052, "step": 882 }, { "epoch": 7.064, "grad_norm": 38.45775604248047, "learning_rate": 3.504e-05, "loss": 1.4581, "step": 883 }, { "epoch": 7.072, "grad_norm": 43.661006927490234, "learning_rate": 3.508e-05, "loss": 2.2765, "step": 884 }, { "epoch": 7.08, "grad_norm": 33.14508056640625, "learning_rate": 3.512e-05, "loss": 1.8462, "step": 885 }, { "epoch": 7.088, "grad_norm": 16.927309036254883, "learning_rate": 3.516e-05, "loss": 1.6089, "step": 886 }, { "epoch": 7.096, "grad_norm": 41.8663215637207, "learning_rate": 3.52e-05, "loss": 1.5919, "step": 887 }, { "epoch": 7.104, "grad_norm": 143.2750701904297, "learning_rate": 3.524e-05, "loss": 1.8559, "step": 888 }, { "epoch": 7.112, "grad_norm": 52.73862075805664, "learning_rate": 3.528e-05, "loss": 1.5913, "step": 889 }, { "epoch": 7.12, "grad_norm": 94.35209655761719, "learning_rate": 3.532e-05, "loss": 1.5398, "step": 890 }, { "epoch": 7.128, "grad_norm": 50.364688873291016, "learning_rate": 3.536000000000001e-05, "loss": 1.4541, "step": 891 }, { "epoch": 7.136, "grad_norm": 70.84886932373047, "learning_rate": 3.54e-05, "loss": 1.6287, "step": 892 }, { "epoch": 7.144, "grad_norm": 59.443782806396484, "learning_rate": 3.544e-05, "loss": 1.8425, "step": 893 }, { "epoch": 7.152, "grad_norm": 56.07847595214844, "learning_rate": 3.548e-05, "loss": 1.381, "step": 894 }, { "epoch": 7.16, "grad_norm": 38.44703674316406, "learning_rate": 3.5520000000000006e-05, "loss": 1.5565, "step": 895 }, { "epoch": 7.168, "grad_norm": 35.204376220703125, "learning_rate": 3.5560000000000005e-05, "loss": 2.1154, "step": 896 }, { "epoch": 7.176, "grad_norm": 38.97987747192383, "learning_rate": 3.56e-05, "loss": 1.9028, "step": 897 }, { "epoch": 7.184, "grad_norm": 48.14381408691406, "learning_rate": 3.5640000000000004e-05, "loss": 1.7498, "step": 898 }, { "epoch": 7.192, "grad_norm": 100.71038818359375, "learning_rate": 3.5680000000000004e-05, 
"loss": 1.5704, "step": 899 }, { "epoch": 7.2, "grad_norm": 38.96990203857422, "learning_rate": 3.5720000000000004e-05, "loss": 1.7156, "step": 900 }, { "epoch": 7.208, "grad_norm": 87.4403305053711, "learning_rate": 3.5759999999999996e-05, "loss": 1.6562, "step": 901 }, { "epoch": 7.216, "grad_norm": 31.052942276000977, "learning_rate": 3.58e-05, "loss": 1.7939, "step": 902 }, { "epoch": 7.224, "grad_norm": 33.88554382324219, "learning_rate": 3.584e-05, "loss": 1.7584, "step": 903 }, { "epoch": 7.232, "grad_norm": 33.32987594604492, "learning_rate": 3.588e-05, "loss": 1.9764, "step": 904 }, { "epoch": 7.24, "grad_norm": 322.1181335449219, "learning_rate": 3.592e-05, "loss": 1.8099, "step": 905 }, { "epoch": 7.248, "grad_norm": 30.327119827270508, "learning_rate": 3.596e-05, "loss": 1.774, "step": 906 }, { "epoch": 7.256, "grad_norm": 292.62939453125, "learning_rate": 3.6e-05, "loss": 2.2383, "step": 907 }, { "epoch": 7.264, "grad_norm": 27.846595764160156, "learning_rate": 3.604e-05, "loss": 1.6925, "step": 908 }, { "epoch": 7.272, "grad_norm": 42.536624908447266, "learning_rate": 3.608e-05, "loss": 2.513, "step": 909 }, { "epoch": 7.28, "grad_norm": 39.16312789916992, "learning_rate": 3.6120000000000007e-05, "loss": 1.4374, "step": 910 }, { "epoch": 7.288, "grad_norm": 36.262691497802734, "learning_rate": 3.616e-05, "loss": 1.4323, "step": 911 }, { "epoch": 7.296, "grad_norm": 192.6688690185547, "learning_rate": 3.62e-05, "loss": 2.1203, "step": 912 }, { "epoch": 7.304, "grad_norm": 198.5966796875, "learning_rate": 3.624e-05, "loss": 1.8619, "step": 913 }, { "epoch": 7.312, "grad_norm": 34.11410903930664, "learning_rate": 3.6280000000000005e-05, "loss": 1.5924, "step": 914 }, { "epoch": 7.32, "grad_norm": 56.692596435546875, "learning_rate": 3.6320000000000005e-05, "loss": 1.4279, "step": 915 }, { "epoch": 7.328, "grad_norm": 23.063617706298828, "learning_rate": 3.636e-05, "loss": 2.0432, "step": 916 }, { "epoch": 7.336, "grad_norm": 37.83278274536133, 
"learning_rate": 3.6400000000000004e-05, "loss": 1.9394, "step": 917 }, { "epoch": 7.344, "grad_norm": 41.70407485961914, "learning_rate": 3.6440000000000003e-05, "loss": 1.4003, "step": 918 }, { "epoch": 7.352, "grad_norm": 24.856597900390625, "learning_rate": 3.648e-05, "loss": 1.463, "step": 919 }, { "epoch": 7.36, "grad_norm": 71.28679656982422, "learning_rate": 3.652e-05, "loss": 1.5236, "step": 920 }, { "epoch": 7.368, "grad_norm": 54.75731658935547, "learning_rate": 3.656e-05, "loss": 1.5652, "step": 921 }, { "epoch": 7.376, "grad_norm": 35.52896499633789, "learning_rate": 3.66e-05, "loss": 1.7141, "step": 922 }, { "epoch": 7.384, "grad_norm": 43.781436920166016, "learning_rate": 3.664e-05, "loss": 1.6541, "step": 923 }, { "epoch": 7.392, "grad_norm": 37.32955551147461, "learning_rate": 3.668e-05, "loss": 1.8216, "step": 924 }, { "epoch": 7.4, "grad_norm": 18.679658889770508, "learning_rate": 3.672000000000001e-05, "loss": 1.7106, "step": 925 }, { "epoch": 7.408, "grad_norm": 553.292724609375, "learning_rate": 3.676e-05, "loss": 1.7809, "step": 926 }, { "epoch": 7.416, "grad_norm": 38.54877853393555, "learning_rate": 3.68e-05, "loss": 2.1387, "step": 927 }, { "epoch": 7.424, "grad_norm": 42.700069427490234, "learning_rate": 3.684e-05, "loss": 1.792, "step": 928 }, { "epoch": 7.432, "grad_norm": 57.8881721496582, "learning_rate": 3.6880000000000006e-05, "loss": 1.2096, "step": 929 }, { "epoch": 7.44, "grad_norm": 41.05021667480469, "learning_rate": 3.692e-05, "loss": 1.3961, "step": 930 }, { "epoch": 7.448, "grad_norm": 34.92211151123047, "learning_rate": 3.696e-05, "loss": 1.9105, "step": 931 }, { "epoch": 7.456, "grad_norm": 53.51667785644531, "learning_rate": 3.7e-05, "loss": 2.1009, "step": 932 }, { "epoch": 7.464, "grad_norm": 47.09217071533203, "learning_rate": 3.7040000000000005e-05, "loss": 2.0219, "step": 933 }, { "epoch": 7.4719999999999995, "grad_norm": 20.466903686523438, "learning_rate": 3.7080000000000004e-05, "loss": 1.9426, "step": 934 }, { 
"epoch": 7.48, "grad_norm": 23.353599548339844, "learning_rate": 3.712e-05, "loss": 1.9236, "step": 935 }, { "epoch": 7.4879999999999995, "grad_norm": 42.60504150390625, "learning_rate": 3.716e-05, "loss": 2.0725, "step": 936 }, { "epoch": 7.496, "grad_norm": 28.139101028442383, "learning_rate": 3.72e-05, "loss": 1.7832, "step": 937 }, { "epoch": 7.504, "grad_norm": 74.86307525634766, "learning_rate": 3.724e-05, "loss": 1.5627, "step": 938 }, { "epoch": 7.5120000000000005, "grad_norm": 23.916378021240234, "learning_rate": 3.728e-05, "loss": 1.6304, "step": 939 }, { "epoch": 7.52, "grad_norm": 28.104074478149414, "learning_rate": 3.732e-05, "loss": 1.5515, "step": 940 }, { "epoch": 7.5280000000000005, "grad_norm": 54.570167541503906, "learning_rate": 3.736e-05, "loss": 2.4997, "step": 941 }, { "epoch": 7.536, "grad_norm": 160.55728149414062, "learning_rate": 3.74e-05, "loss": 1.7766, "step": 942 }, { "epoch": 7.5440000000000005, "grad_norm": 73.4301986694336, "learning_rate": 3.744e-05, "loss": 1.5699, "step": 943 }, { "epoch": 7.552, "grad_norm": 36.959468841552734, "learning_rate": 3.748000000000001e-05, "loss": 3.4342, "step": 944 }, { "epoch": 7.5600000000000005, "grad_norm": 30.992952346801758, "learning_rate": 3.752e-05, "loss": 1.5097, "step": 945 }, { "epoch": 7.568, "grad_norm": 48.06689453125, "learning_rate": 3.756e-05, "loss": 1.6734, "step": 946 }, { "epoch": 7.576, "grad_norm": 38.336456298828125, "learning_rate": 3.76e-05, "loss": 2.049, "step": 947 }, { "epoch": 7.584, "grad_norm": 42.6774787902832, "learning_rate": 3.7640000000000006e-05, "loss": 1.7993, "step": 948 }, { "epoch": 7.592, "grad_norm": 64.4164810180664, "learning_rate": 3.7680000000000005e-05, "loss": 1.575, "step": 949 }, { "epoch": 7.6, "grad_norm": 75.75891876220703, "learning_rate": 3.772e-05, "loss": 1.5894, "step": 950 }, { "epoch": 7.608, "grad_norm": 137.67550659179688, "learning_rate": 3.776e-05, "loss": 2.0466, "step": 951 }, { "epoch": 7.616, "grad_norm": 40.18898010253906, 
"learning_rate": 3.7800000000000004e-05, "loss": 1.3857, "step": 952 }, { "epoch": 7.624, "grad_norm": 56.38101577758789, "learning_rate": 3.7840000000000004e-05, "loss": 2.4877, "step": 953 }, { "epoch": 7.632, "grad_norm": 31.547622680664062, "learning_rate": 3.788e-05, "loss": 1.3882, "step": 954 }, { "epoch": 7.64, "grad_norm": 35.97997283935547, "learning_rate": 3.792e-05, "loss": 3.0289, "step": 955 }, { "epoch": 7.648, "grad_norm": 22.983070373535156, "learning_rate": 3.796e-05, "loss": 1.8289, "step": 956 }, { "epoch": 7.656, "grad_norm": 75.73885345458984, "learning_rate": 3.8e-05, "loss": 1.7089, "step": 957 }, { "epoch": 7.664, "grad_norm": 52.999061584472656, "learning_rate": 3.804e-05, "loss": 1.6837, "step": 958 }, { "epoch": 7.672, "grad_norm": 60.12261199951172, "learning_rate": 3.808e-05, "loss": 1.9381, "step": 959 }, { "epoch": 7.68, "grad_norm": 33.95718765258789, "learning_rate": 3.812e-05, "loss": 2.0595, "step": 960 }, { "epoch": 7.688, "grad_norm": 36.294185638427734, "learning_rate": 3.816e-05, "loss": 2.2453, "step": 961 }, { "epoch": 7.696, "grad_norm": 32.91172790527344, "learning_rate": 3.82e-05, "loss": 1.806, "step": 962 }, { "epoch": 7.704, "grad_norm": 26.855613708496094, "learning_rate": 3.8240000000000007e-05, "loss": 1.9775, "step": 963 }, { "epoch": 7.712, "grad_norm": 38.17164993286133, "learning_rate": 3.828e-05, "loss": 1.37, "step": 964 }, { "epoch": 7.72, "grad_norm": 66.99996948242188, "learning_rate": 3.832e-05, "loss": 1.7959, "step": 965 }, { "epoch": 7.728, "grad_norm": 40.10707092285156, "learning_rate": 3.836e-05, "loss": 1.7814, "step": 966 }, { "epoch": 7.736, "grad_norm": 40.50642395019531, "learning_rate": 3.8400000000000005e-05, "loss": 2.1128, "step": 967 }, { "epoch": 7.744, "grad_norm": 41.78212356567383, "learning_rate": 3.8440000000000005e-05, "loss": 1.2403, "step": 968 }, { "epoch": 7.752, "grad_norm": 22.944734573364258, "learning_rate": 3.848e-05, "loss": 1.8813, "step": 969 }, { "epoch": 7.76, 
"grad_norm": 66.62805938720703, "learning_rate": 3.8520000000000004e-05, "loss": 1.7326, "step": 970 }, { "epoch": 7.768, "grad_norm": 112.28273010253906, "learning_rate": 3.8560000000000004e-05, "loss": 1.461, "step": 971 }, { "epoch": 7.776, "grad_norm": 32.709388732910156, "learning_rate": 3.86e-05, "loss": 1.5841, "step": 972 }, { "epoch": 7.784, "grad_norm": 33.62651062011719, "learning_rate": 3.864e-05, "loss": 1.5276, "step": 973 }, { "epoch": 7.792, "grad_norm": 27.54905891418457, "learning_rate": 3.868e-05, "loss": 1.9831, "step": 974 }, { "epoch": 7.8, "grad_norm": 100.2403564453125, "learning_rate": 3.872e-05, "loss": 2.5011, "step": 975 }, { "epoch": 7.808, "grad_norm": 95.94187927246094, "learning_rate": 3.876e-05, "loss": 2.0395, "step": 976 }, { "epoch": 7.816, "grad_norm": 45.280372619628906, "learning_rate": 3.88e-05, "loss": 1.5952, "step": 977 }, { "epoch": 7.824, "grad_norm": 24.13416290283203, "learning_rate": 3.884e-05, "loss": 1.8731, "step": 978 }, { "epoch": 7.832, "grad_norm": 62.834468841552734, "learning_rate": 3.888e-05, "loss": 1.4483, "step": 979 }, { "epoch": 7.84, "grad_norm": 43.39765167236328, "learning_rate": 3.892e-05, "loss": 1.6652, "step": 980 }, { "epoch": 7.848, "grad_norm": 36.28022384643555, "learning_rate": 3.896e-05, "loss": 1.5915, "step": 981 }, { "epoch": 7.856, "grad_norm": 74.50053405761719, "learning_rate": 3.9000000000000006e-05, "loss": 1.9523, "step": 982 }, { "epoch": 7.864, "grad_norm": 33.76795959472656, "learning_rate": 3.9040000000000006e-05, "loss": 1.7647, "step": 983 }, { "epoch": 7.872, "grad_norm": 31.561397552490234, "learning_rate": 3.908e-05, "loss": 1.9155, "step": 984 }, { "epoch": 7.88, "grad_norm": 48.41033172607422, "learning_rate": 3.912e-05, "loss": 1.8109, "step": 985 }, { "epoch": 7.888, "grad_norm": 50.8421516418457, "learning_rate": 3.9160000000000005e-05, "loss": 2.7684, "step": 986 }, { "epoch": 7.896, "grad_norm": 27.250818252563477, "learning_rate": 3.9200000000000004e-05, "loss": 
1.9612, "step": 987 }, { "epoch": 7.904, "grad_norm": 933.0300903320312, "learning_rate": 3.9240000000000004e-05, "loss": 1.676, "step": 988 }, { "epoch": 7.912, "grad_norm": 111.62077331542969, "learning_rate": 3.9280000000000003e-05, "loss": 1.67, "step": 989 }, { "epoch": 7.92, "grad_norm": 43.75034713745117, "learning_rate": 3.932e-05, "loss": 1.8565, "step": 990 }, { "epoch": 7.928, "grad_norm": 33.37947082519531, "learning_rate": 3.936e-05, "loss": 2.2448, "step": 991 }, { "epoch": 7.936, "grad_norm": 32.948341369628906, "learning_rate": 3.94e-05, "loss": 1.4842, "step": 992 }, { "epoch": 7.944, "grad_norm": 112.30684661865234, "learning_rate": 3.944e-05, "loss": 2.2233, "step": 993 }, { "epoch": 7.952, "grad_norm": 35.135162353515625, "learning_rate": 3.948e-05, "loss": 1.5722, "step": 994 }, { "epoch": 7.96, "grad_norm": 31.724262237548828, "learning_rate": 3.952e-05, "loss": 1.5765, "step": 995 }, { "epoch": 7.968, "grad_norm": 86.57245635986328, "learning_rate": 3.956e-05, "loss": 1.5209, "step": 996 }, { "epoch": 7.976, "grad_norm": 47.26936721801758, "learning_rate": 3.960000000000001e-05, "loss": 2.0752, "step": 997 }, { "epoch": 7.984, "grad_norm": 91.80619049072266, "learning_rate": 3.964e-05, "loss": 1.5518, "step": 998 }, { "epoch": 7.992, "grad_norm": 73.01097106933594, "learning_rate": 3.968e-05, "loss": 1.7636, "step": 999 }, { "epoch": 8.0, "grad_norm": 74.47737121582031, "learning_rate": 3.972e-05, "loss": 1.6603, "step": 1000 }, { "epoch": 8.0, "eval_loss": 1.9016398191452026, "eval_map": 0.0939, "eval_map_50": 0.2124, "eval_map_75": 0.0788, "eval_map_Coverall": 0.3591, "eval_map_Face_Shield": 0.0, "eval_map_Gloves": 0.0628, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0474, "eval_map_large": 0.1037, "eval_map_medium": 0.0273, "eval_map_small": 0.0386, "eval_mar_1": 0.0942, "eval_mar_10": 0.1965, "eval_mar_100": 0.2228, "eval_mar_100_Coverall": 0.5822, "eval_mar_100_Face_Shield": 0.0, "eval_mar_100_Gloves": 0.2721, "eval_mar_100_Goggles": 0.0, 
"eval_mar_100_Mask": 0.2596, "eval_mar_large": 0.2299, "eval_mar_medium": 0.1347, "eval_mar_small": 0.0665, "eval_runtime": 0.9098, "eval_samples_per_second": 31.875, "eval_steps_per_second": 2.198, "step": 1000 }, { "epoch": 8.008, "grad_norm": 103.3221664428711, "learning_rate": 3.9760000000000006e-05, "loss": 1.8812, "step": 1001 }, { "epoch": 8.016, "grad_norm": 30.62323570251465, "learning_rate": 3.9800000000000005e-05, "loss": 2.1974, "step": 1002 }, { "epoch": 8.024, "grad_norm": 90.23351287841797, "learning_rate": 3.984e-05, "loss": 1.6207, "step": 1003 }, { "epoch": 8.032, "grad_norm": 51.917110443115234, "learning_rate": 3.988e-05, "loss": 2.2992, "step": 1004 }, { "epoch": 8.04, "grad_norm": 33.516944885253906, "learning_rate": 3.9920000000000004e-05, "loss": 1.8147, "step": 1005 }, { "epoch": 8.048, "grad_norm": 67.17483520507812, "learning_rate": 3.9960000000000004e-05, "loss": 1.7384, "step": 1006 }, { "epoch": 8.056, "grad_norm": 110.9447021484375, "learning_rate": 4e-05, "loss": 1.7515, "step": 1007 }, { "epoch": 8.064, "grad_norm": 85.10989379882812, "learning_rate": 4.004e-05, "loss": 1.7249, "step": 1008 }, { "epoch": 8.072, "grad_norm": 43.35692596435547, "learning_rate": 4.008e-05, "loss": 1.9146, "step": 1009 }, { "epoch": 8.08, "grad_norm": 45.42159652709961, "learning_rate": 4.012e-05, "loss": 1.9125, "step": 1010 }, { "epoch": 8.088, "grad_norm": 72.42352294921875, "learning_rate": 4.016e-05, "loss": 1.7756, "step": 1011 }, { "epoch": 8.096, "grad_norm": 48.217437744140625, "learning_rate": 4.02e-05, "loss": 1.6061, "step": 1012 }, { "epoch": 8.104, "grad_norm": 55.91665267944336, "learning_rate": 4.024e-05, "loss": 1.9121, "step": 1013 }, { "epoch": 8.112, "grad_norm": 29.79120635986328, "learning_rate": 4.028e-05, "loss": 1.7056, "step": 1014 }, { "epoch": 8.12, "grad_norm": 31.847097396850586, "learning_rate": 4.032e-05, "loss": 2.0421, "step": 1015 }, { "epoch": 8.128, "grad_norm": 29.91244888305664, "learning_rate": 
4.0360000000000007e-05, "loss": 1.9639, "step": 1016 }, { "epoch": 8.136, "grad_norm": 59.22207260131836, "learning_rate": 4.0400000000000006e-05, "loss": 1.8252, "step": 1017 }, { "epoch": 8.144, "grad_norm": 30.04961395263672, "learning_rate": 4.044e-05, "loss": 1.898, "step": 1018 }, { "epoch": 8.152, "grad_norm": 71.8339614868164, "learning_rate": 4.048e-05, "loss": 1.6681, "step": 1019 }, { "epoch": 8.16, "grad_norm": 52.53684616088867, "learning_rate": 4.0520000000000005e-05, "loss": 1.6392, "step": 1020 }, { "epoch": 8.168, "grad_norm": 107.84783172607422, "learning_rate": 4.0560000000000005e-05, "loss": 1.8253, "step": 1021 }, { "epoch": 8.176, "grad_norm": 77.24821472167969, "learning_rate": 4.0600000000000004e-05, "loss": 1.9765, "step": 1022 }, { "epoch": 8.184, "grad_norm": 35.85078430175781, "learning_rate": 4.064e-05, "loss": 1.5153, "step": 1023 }, { "epoch": 8.192, "grad_norm": 49.192928314208984, "learning_rate": 4.0680000000000004e-05, "loss": 2.0469, "step": 1024 }, { "epoch": 8.2, "grad_norm": 57.25447082519531, "learning_rate": 4.072e-05, "loss": 1.6789, "step": 1025 }, { "epoch": 8.208, "grad_norm": 49.506919860839844, "learning_rate": 4.076e-05, "loss": 1.5269, "step": 1026 }, { "epoch": 8.216, "grad_norm": 89.94831085205078, "learning_rate": 4.08e-05, "loss": 1.6138, "step": 1027 }, { "epoch": 8.224, "grad_norm": 43.91194152832031, "learning_rate": 4.084e-05, "loss": 1.7572, "step": 1028 }, { "epoch": 8.232, "grad_norm": 58.925987243652344, "learning_rate": 4.088e-05, "loss": 1.6902, "step": 1029 }, { "epoch": 8.24, "grad_norm": 32.6387939453125, "learning_rate": 4.092e-05, "loss": 2.3948, "step": 1030 }, { "epoch": 8.248, "grad_norm": 113.8521499633789, "learning_rate": 4.096e-05, "loss": 1.5464, "step": 1031 }, { "epoch": 8.256, "grad_norm": 48.1077995300293, "learning_rate": 4.1e-05, "loss": 1.8446, "step": 1032 }, { "epoch": 8.264, "grad_norm": 282.6051940917969, "learning_rate": 4.104e-05, "loss": 1.7837, "step": 1033 }, { "epoch": 
8.272, "grad_norm": 40.74654769897461, "learning_rate": 4.108e-05, "loss": 1.9717, "step": 1034 }, { "epoch": 8.28, "grad_norm": 49.03605651855469, "learning_rate": 4.1120000000000006e-05, "loss": 1.752, "step": 1035 }, { "epoch": 8.288, "grad_norm": 29.829410552978516, "learning_rate": 4.1160000000000006e-05, "loss": 2.0508, "step": 1036 }, { "epoch": 8.296, "grad_norm": 75.78207397460938, "learning_rate": 4.12e-05, "loss": 2.6904, "step": 1037 }, { "epoch": 8.304, "grad_norm": 86.71575164794922, "learning_rate": 4.124e-05, "loss": 1.9598, "step": 1038 }, { "epoch": 8.312, "grad_norm": 82.31941986083984, "learning_rate": 4.1280000000000005e-05, "loss": 2.0075, "step": 1039 }, { "epoch": 8.32, "grad_norm": 121.77545166015625, "learning_rate": 4.1320000000000004e-05, "loss": 1.8607, "step": 1040 }, { "epoch": 8.328, "grad_norm": 42.357913970947266, "learning_rate": 4.1360000000000004e-05, "loss": 1.7707, "step": 1041 }, { "epoch": 8.336, "grad_norm": 106.06333923339844, "learning_rate": 4.14e-05, "loss": 2.2956, "step": 1042 }, { "epoch": 8.344, "grad_norm": 116.29046630859375, "learning_rate": 4.144e-05, "loss": 1.7658, "step": 1043 }, { "epoch": 8.352, "grad_norm": 116.55040740966797, "learning_rate": 4.148e-05, "loss": 1.7957, "step": 1044 }, { "epoch": 8.36, "grad_norm": 44.26662826538086, "learning_rate": 4.152e-05, "loss": 1.599, "step": 1045 }, { "epoch": 8.368, "grad_norm": 39.48909378051758, "learning_rate": 4.156e-05, "loss": 1.3238, "step": 1046 }, { "epoch": 8.376, "grad_norm": 43.50898361206055, "learning_rate": 4.16e-05, "loss": 2.2835, "step": 1047 }, { "epoch": 8.384, "grad_norm": 56.5804443359375, "learning_rate": 4.164e-05, "loss": 1.7528, "step": 1048 }, { "epoch": 8.392, "grad_norm": 25.55586051940918, "learning_rate": 4.168e-05, "loss": 1.4458, "step": 1049 }, { "epoch": 8.4, "grad_norm": 76.53412628173828, "learning_rate": 4.172e-05, "loss": 1.4933, "step": 1050 }, { "epoch": 8.408, "grad_norm": 38.94574737548828, "learning_rate": 
4.176000000000001e-05, "loss": 1.5405, "step": 1051 }, { "epoch": 8.416, "grad_norm": 27.674579620361328, "learning_rate": 4.18e-05, "loss": 1.5148, "step": 1052 }, { "epoch": 8.424, "grad_norm": 38.60129165649414, "learning_rate": 4.184e-05, "loss": 1.6208, "step": 1053 }, { "epoch": 8.432, "grad_norm": 45.078277587890625, "learning_rate": 4.1880000000000006e-05, "loss": 1.909, "step": 1054 }, { "epoch": 8.44, "grad_norm": 25.802885055541992, "learning_rate": 4.1920000000000005e-05, "loss": 2.0037, "step": 1055 }, { "epoch": 8.448, "grad_norm": 40.14674758911133, "learning_rate": 4.196e-05, "loss": 2.0083, "step": 1056 }, { "epoch": 8.456, "grad_norm": 61.34543228149414, "learning_rate": 4.2e-05, "loss": 1.5798, "step": 1057 }, { "epoch": 8.464, "grad_norm": 57.6539192199707, "learning_rate": 4.2040000000000004e-05, "loss": 1.6697, "step": 1058 }, { "epoch": 8.472, "grad_norm": 49.15740203857422, "learning_rate": 4.2080000000000004e-05, "loss": 1.4442, "step": 1059 }, { "epoch": 8.48, "grad_norm": 37.14937973022461, "learning_rate": 4.212e-05, "loss": 1.7621, "step": 1060 }, { "epoch": 8.488, "grad_norm": 84.7442626953125, "learning_rate": 4.2159999999999996e-05, "loss": 1.2261, "step": 1061 }, { "epoch": 8.496, "grad_norm": 52.36440658569336, "learning_rate": 4.22e-05, "loss": 1.7423, "step": 1062 }, { "epoch": 8.504, "grad_norm": 75.54283905029297, "learning_rate": 4.224e-05, "loss": 1.8237, "step": 1063 }, { "epoch": 8.512, "grad_norm": 39.17848205566406, "learning_rate": 4.228e-05, "loss": 1.5581, "step": 1064 }, { "epoch": 8.52, "grad_norm": 41.33695983886719, "learning_rate": 4.232e-05, "loss": 2.3326, "step": 1065 }, { "epoch": 8.528, "grad_norm": 151.7939910888672, "learning_rate": 4.236e-05, "loss": 1.2563, "step": 1066 }, { "epoch": 8.536, "grad_norm": 53.477684020996094, "learning_rate": 4.24e-05, "loss": 1.8033, "step": 1067 }, { "epoch": 8.544, "grad_norm": 34.41264343261719, "learning_rate": 4.244e-05, "loss": 1.4122, "step": 1068 }, { "epoch": 
8.552, "grad_norm": 56.34254455566406, "learning_rate": 4.248e-05, "loss": 1.755, "step": 1069 }, { "epoch": 8.56, "grad_norm": 46.79838180541992, "learning_rate": 4.2520000000000006e-05, "loss": 1.5388, "step": 1070 }, { "epoch": 8.568, "grad_norm": 29.776121139526367, "learning_rate": 4.256e-05, "loss": 1.5525, "step": 1071 }, { "epoch": 8.576, "grad_norm": 57.73811340332031, "learning_rate": 4.26e-05, "loss": 2.0386, "step": 1072 }, { "epoch": 8.584, "grad_norm": 193.00936889648438, "learning_rate": 4.2640000000000005e-05, "loss": 1.5475, "step": 1073 }, { "epoch": 8.592, "grad_norm": 63.2039680480957, "learning_rate": 4.2680000000000005e-05, "loss": 1.6115, "step": 1074 }, { "epoch": 8.6, "grad_norm": 118.80516815185547, "learning_rate": 4.2720000000000004e-05, "loss": 2.2799, "step": 1075 }, { "epoch": 8.608, "grad_norm": 47.19162368774414, "learning_rate": 4.276e-05, "loss": 2.1656, "step": 1076 }, { "epoch": 8.616, "grad_norm": 94.49764251708984, "learning_rate": 4.2800000000000004e-05, "loss": 1.742, "step": 1077 }, { "epoch": 8.624, "grad_norm": 123.6723403930664, "learning_rate": 4.284e-05, "loss": 1.44, "step": 1078 }, { "epoch": 8.632, "grad_norm": 70.802734375, "learning_rate": 4.288e-05, "loss": 1.8454, "step": 1079 }, { "epoch": 8.64, "grad_norm": 34.20899200439453, "learning_rate": 4.292e-05, "loss": 1.7142, "step": 1080 }, { "epoch": 8.648, "grad_norm": 117.17802429199219, "learning_rate": 4.296e-05, "loss": 1.707, "step": 1081 }, { "epoch": 8.656, "grad_norm": 46.618438720703125, "learning_rate": 4.3e-05, "loss": 2.0909, "step": 1082 }, { "epoch": 8.664, "grad_norm": 38.75689697265625, "learning_rate": 4.304e-05, "loss": 1.5729, "step": 1083 }, { "epoch": 8.672, "grad_norm": 110.13563537597656, "learning_rate": 4.308e-05, "loss": 2.0333, "step": 1084 }, { "epoch": 8.68, "grad_norm": 19.59214973449707, "learning_rate": 4.312000000000001e-05, "loss": 2.0988, "step": 1085 }, { "epoch": 8.688, "grad_norm": 38.22223663330078, "learning_rate": 
4.316e-05, "loss": 2.0575, "step": 1086 }, { "epoch": 8.696, "grad_norm": 35.03896713256836, "learning_rate": 4.32e-05, "loss": 1.7605, "step": 1087 }, { "epoch": 8.704, "grad_norm": 25.797988891601562, "learning_rate": 4.324e-05, "loss": 2.1099, "step": 1088 }, { "epoch": 8.712, "grad_norm": 201.4046630859375, "learning_rate": 4.3280000000000006e-05, "loss": 1.8706, "step": 1089 }, { "epoch": 8.72, "grad_norm": 28.7613525390625, "learning_rate": 4.332e-05, "loss": 1.5863, "step": 1090 }, { "epoch": 8.728, "grad_norm": 30.931379318237305, "learning_rate": 4.336e-05, "loss": 1.8445, "step": 1091 }, { "epoch": 8.736, "grad_norm": 54.155147552490234, "learning_rate": 4.3400000000000005e-05, "loss": 1.7214, "step": 1092 }, { "epoch": 8.744, "grad_norm": 149.74325561523438, "learning_rate": 4.3440000000000004e-05, "loss": 1.793, "step": 1093 }, { "epoch": 8.752, "grad_norm": 34.01882553100586, "learning_rate": 4.3480000000000004e-05, "loss": 1.8385, "step": 1094 }, { "epoch": 8.76, "grad_norm": 55.24512481689453, "learning_rate": 4.352e-05, "loss": 1.6481, "step": 1095 }, { "epoch": 8.768, "grad_norm": 180.62457275390625, "learning_rate": 4.356e-05, "loss": 2.3202, "step": 1096 }, { "epoch": 8.776, "grad_norm": 100.86471557617188, "learning_rate": 4.36e-05, "loss": 2.1362, "step": 1097 }, { "epoch": 8.784, "grad_norm": 118.03572082519531, "learning_rate": 4.364e-05, "loss": 1.6304, "step": 1098 }, { "epoch": 8.792, "grad_norm": 31.954761505126953, "learning_rate": 4.368e-05, "loss": 1.6957, "step": 1099 }, { "epoch": 8.8, "grad_norm": 29.48064613342285, "learning_rate": 4.372e-05, "loss": 1.384, "step": 1100 }, { "epoch": 8.808, "grad_norm": 168.66659545898438, "learning_rate": 4.376e-05, "loss": 1.9071, "step": 1101 }, { "epoch": 8.816, "grad_norm": 49.323890686035156, "learning_rate": 4.38e-05, "loss": 1.8169, "step": 1102 }, { "epoch": 8.824, "grad_norm": 82.67013549804688, "learning_rate": 4.384e-05, "loss": 1.7584, "step": 1103 }, { "epoch": 8.832, "grad_norm": 
492.2800598144531, "learning_rate": 4.388000000000001e-05, "loss": 2.1379, "step": 1104 }, { "epoch": 8.84, "grad_norm": 26.778118133544922, "learning_rate": 4.392e-05, "loss": 1.5864, "step": 1105 }, { "epoch": 8.848, "grad_norm": 74.6778793334961, "learning_rate": 4.396e-05, "loss": 3.3489, "step": 1106 }, { "epoch": 8.856, "grad_norm": 39.965335845947266, "learning_rate": 4.4000000000000006e-05, "loss": 1.5692, "step": 1107 }, { "epoch": 8.864, "grad_norm": 724.7208251953125, "learning_rate": 4.4040000000000005e-05, "loss": 2.155, "step": 1108 }, { "epoch": 8.872, "grad_norm": 55.7608528137207, "learning_rate": 4.4080000000000005e-05, "loss": 2.0697, "step": 1109 }, { "epoch": 8.88, "grad_norm": 28.39957046508789, "learning_rate": 4.412e-05, "loss": 1.4595, "step": 1110 }, { "epoch": 8.888, "grad_norm": 210.3000946044922, "learning_rate": 4.4160000000000004e-05, "loss": 1.6343, "step": 1111 }, { "epoch": 8.896, "grad_norm": 50.10006332397461, "learning_rate": 4.4200000000000004e-05, "loss": 1.555, "step": 1112 }, { "epoch": 8.904, "grad_norm": 170.37936401367188, "learning_rate": 4.424e-05, "loss": 3.3727, "step": 1113 }, { "epoch": 8.912, "grad_norm": 44.30866622924805, "learning_rate": 4.428e-05, "loss": 2.0595, "step": 1114 }, { "epoch": 8.92, "grad_norm": 63.778568267822266, "learning_rate": 4.432e-05, "loss": 1.5711, "step": 1115 }, { "epoch": 8.928, "grad_norm": 31.71515655517578, "learning_rate": 4.436e-05, "loss": 2.1104, "step": 1116 }, { "epoch": 8.936, "grad_norm": 51.59070587158203, "learning_rate": 4.44e-05, "loss": 1.5861, "step": 1117 }, { "epoch": 8.943999999999999, "grad_norm": 47.14512634277344, "learning_rate": 4.444e-05, "loss": 1.9259, "step": 1118 }, { "epoch": 8.952, "grad_norm": 108.53536224365234, "learning_rate": 4.448e-05, "loss": 1.9534, "step": 1119 }, { "epoch": 8.96, "grad_norm": 24.528221130371094, "learning_rate": 4.452e-05, "loss": 1.5073, "step": 1120 }, { "epoch": 8.968, "grad_norm": 55.59872055053711, "learning_rate": 
4.456e-05, "loss": 2.0826, "step": 1121 }, { "epoch": 8.975999999999999, "grad_norm": 59.40151596069336, "learning_rate": 4.46e-05, "loss": 2.483, "step": 1122 }, { "epoch": 8.984, "grad_norm": 26.981708526611328, "learning_rate": 4.4640000000000006e-05, "loss": 1.3666, "step": 1123 }, { "epoch": 8.992, "grad_norm": 71.20948791503906, "learning_rate": 4.468e-05, "loss": 1.6078, "step": 1124 }, { "epoch": 9.0, "grad_norm": 72.37503051757812, "learning_rate": 4.472e-05, "loss": 1.3882, "step": 1125 }, { "epoch": 9.0, "eval_loss": 1.8629357814788818, "eval_map": 0.1306, "eval_map_50": 0.2625, "eval_map_75": 0.1079, "eval_map_Coverall": 0.3948, "eval_map_Face_Shield": 0.0989, "eval_map_Gloves": 0.0868, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0723, "eval_map_large": 0.1554, "eval_map_medium": 0.0388, "eval_map_small": 0.048, "eval_mar_1": 0.1401, "eval_mar_10": 0.2532, "eval_mar_100": 0.2924, "eval_mar_100_Coverall": 0.6156, "eval_mar_100_Face_Shield": 0.1706, "eval_mar_100_Gloves": 0.3492, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3269, "eval_mar_large": 0.3043, "eval_mar_medium": 0.1573, "eval_mar_small": 0.1263, "eval_runtime": 0.9124, "eval_samples_per_second": 31.786, "eval_steps_per_second": 2.192, "step": 1125 }, { "epoch": 9.008, "grad_norm": 56.177852630615234, "learning_rate": 4.4760000000000005e-05, "loss": 1.7631, "step": 1126 }, { "epoch": 9.016, "grad_norm": 187.3551483154297, "learning_rate": 4.4800000000000005e-05, "loss": 1.4822, "step": 1127 }, { "epoch": 9.024, "grad_norm": 59.3778190612793, "learning_rate": 4.4840000000000004e-05, "loss": 1.2846, "step": 1128 }, { "epoch": 9.032, "grad_norm": 83.79129028320312, "learning_rate": 4.488e-05, "loss": 1.9669, "step": 1129 }, { "epoch": 9.04, "grad_norm": 44.691200256347656, "learning_rate": 4.4920000000000004e-05, "loss": 2.1037, "step": 1130 }, { "epoch": 9.048, "grad_norm": 45.26520919799805, "learning_rate": 4.496e-05, "loss": 1.549, "step": 1131 }, { "epoch": 9.056, "grad_norm": 
81.58961486816406, "learning_rate": 4.5e-05, "loss": 2.335, "step": 1132 }, { "epoch": 9.064, "grad_norm": 35.18939208984375, "learning_rate": 4.504e-05, "loss": 1.6332, "step": 1133 }, { "epoch": 9.072, "grad_norm": 53.35359191894531, "learning_rate": 4.508e-05, "loss": 1.7738, "step": 1134 }, { "epoch": 9.08, "grad_norm": 33.36067581176758, "learning_rate": 4.512e-05, "loss": 2.1878, "step": 1135 }, { "epoch": 9.088, "grad_norm": 98.03429412841797, "learning_rate": 4.516e-05, "loss": 1.6843, "step": 1136 }, { "epoch": 9.096, "grad_norm": 271.0297546386719, "learning_rate": 4.52e-05, "loss": 2.0004, "step": 1137 }, { "epoch": 9.104, "grad_norm": 22.28072738647461, "learning_rate": 4.524000000000001e-05, "loss": 1.6553, "step": 1138 }, { "epoch": 9.112, "grad_norm": 89.61740112304688, "learning_rate": 4.528e-05, "loss": 2.3289, "step": 1139 }, { "epoch": 9.12, "grad_norm": 73.81854248046875, "learning_rate": 4.532e-05, "loss": 1.6633, "step": 1140 }, { "epoch": 9.128, "grad_norm": 64.0552978515625, "learning_rate": 4.536e-05, "loss": 1.7303, "step": 1141 }, { "epoch": 9.136, "grad_norm": 50.35757064819336, "learning_rate": 4.5400000000000006e-05, "loss": 1.5358, "step": 1142 }, { "epoch": 9.144, "grad_norm": 52.22535705566406, "learning_rate": 4.5440000000000005e-05, "loss": 1.8959, "step": 1143 }, { "epoch": 9.152, "grad_norm": 56.51514434814453, "learning_rate": 4.548e-05, "loss": 1.8175, "step": 1144 }, { "epoch": 9.16, "grad_norm": 42.85686111450195, "learning_rate": 4.5520000000000005e-05, "loss": 1.5191, "step": 1145 }, { "epoch": 9.168, "grad_norm": 31.118284225463867, "learning_rate": 4.5560000000000004e-05, "loss": 1.352, "step": 1146 }, { "epoch": 9.176, "grad_norm": 35.955467224121094, "learning_rate": 4.5600000000000004e-05, "loss": 1.1598, "step": 1147 }, { "epoch": 9.184, "grad_norm": 36.105316162109375, "learning_rate": 4.564e-05, "loss": 1.6874, "step": 1148 }, { "epoch": 9.192, "grad_norm": 23.90782356262207, "learning_rate": 4.568e-05, "loss": 
1.8694, "step": 1149 }, { "epoch": 9.2, "grad_norm": 31.9908447265625, "learning_rate": 4.572e-05, "loss": 2.0382, "step": 1150 }, { "epoch": 9.208, "grad_norm": 51.12784957885742, "learning_rate": 4.576e-05, "loss": 1.6393, "step": 1151 }, { "epoch": 9.216, "grad_norm": 32.6817741394043, "learning_rate": 4.58e-05, "loss": 1.421, "step": 1152 }, { "epoch": 9.224, "grad_norm": 19.771520614624023, "learning_rate": 4.584e-05, "loss": 1.6391, "step": 1153 }, { "epoch": 9.232, "grad_norm": 49.149436950683594, "learning_rate": 4.588e-05, "loss": 1.5697, "step": 1154 }, { "epoch": 9.24, "grad_norm": 20.370513916015625, "learning_rate": 4.592e-05, "loss": 2.0693, "step": 1155 }, { "epoch": 9.248, "grad_norm": 27.83523941040039, "learning_rate": 4.596e-05, "loss": 1.6295, "step": 1156 }, { "epoch": 9.256, "grad_norm": 59.94819259643555, "learning_rate": 4.600000000000001e-05, "loss": 1.622, "step": 1157 }, { "epoch": 9.264, "grad_norm": 55.390438079833984, "learning_rate": 4.604e-05, "loss": 2.2452, "step": 1158 }, { "epoch": 9.272, "grad_norm": 30.385921478271484, "learning_rate": 4.608e-05, "loss": 1.6348, "step": 1159 }, { "epoch": 9.28, "grad_norm": 41.30160140991211, "learning_rate": 4.612e-05, "loss": 1.2663, "step": 1160 }, { "epoch": 9.288, "grad_norm": 23.940664291381836, "learning_rate": 4.6160000000000005e-05, "loss": 1.5305, "step": 1161 }, { "epoch": 9.296, "grad_norm": 36.786190032958984, "learning_rate": 4.6200000000000005e-05, "loss": 1.7517, "step": 1162 }, { "epoch": 9.304, "grad_norm": 45.43473434448242, "learning_rate": 4.624e-05, "loss": 1.9564, "step": 1163 }, { "epoch": 9.312, "grad_norm": 47.81510543823242, "learning_rate": 4.6280000000000004e-05, "loss": 2.0176, "step": 1164 }, { "epoch": 9.32, "grad_norm": 30.248764038085938, "learning_rate": 4.6320000000000004e-05, "loss": 1.4472, "step": 1165 }, { "epoch": 9.328, "grad_norm": 21.763460159301758, "learning_rate": 4.636e-05, "loss": 1.9501, "step": 1166 }, { "epoch": 9.336, "grad_norm": 
26.344465255737305, "learning_rate": 4.64e-05, "loss": 2.0544, "step": 1167 }, { "epoch": 9.344, "grad_norm": 35.232749938964844, "learning_rate": 4.644e-05, "loss": 2.2512, "step": 1168 }, { "epoch": 9.352, "grad_norm": 104.20940399169922, "learning_rate": 4.648e-05, "loss": 1.2703, "step": 1169 }, { "epoch": 9.36, "grad_norm": 39.925201416015625, "learning_rate": 4.652e-05, "loss": 1.9204, "step": 1170 }, { "epoch": 9.368, "grad_norm": 47.42292022705078, "learning_rate": 4.656e-05, "loss": 2.0483, "step": 1171 }, { "epoch": 9.376, "grad_norm": 24.529680252075195, "learning_rate": 4.660000000000001e-05, "loss": 1.7567, "step": 1172 }, { "epoch": 9.384, "grad_norm": 31.856830596923828, "learning_rate": 4.664e-05, "loss": 1.7145, "step": 1173 }, { "epoch": 9.392, "grad_norm": 23.964954376220703, "learning_rate": 4.668e-05, "loss": 1.8813, "step": 1174 }, { "epoch": 9.4, "grad_norm": 52.84688186645508, "learning_rate": 4.672e-05, "loss": 1.6903, "step": 1175 }, { "epoch": 9.408, "grad_norm": 26.650814056396484, "learning_rate": 4.6760000000000006e-05, "loss": 1.4735, "step": 1176 }, { "epoch": 9.416, "grad_norm": 62.299930572509766, "learning_rate": 4.6800000000000006e-05, "loss": 1.3623, "step": 1177 }, { "epoch": 9.424, "grad_norm": 37.21135330200195, "learning_rate": 4.684e-05, "loss": 2.3257, "step": 1178 }, { "epoch": 9.432, "grad_norm": 29.542675018310547, "learning_rate": 4.688e-05, "loss": 1.6061, "step": 1179 }, { "epoch": 9.44, "grad_norm": 84.93757629394531, "learning_rate": 4.6920000000000005e-05, "loss": 1.9067, "step": 1180 }, { "epoch": 9.448, "grad_norm": 92.04182434082031, "learning_rate": 4.6960000000000004e-05, "loss": 1.6214, "step": 1181 }, { "epoch": 9.456, "grad_norm": 32.566131591796875, "learning_rate": 4.7e-05, "loss": 1.6694, "step": 1182 }, { "epoch": 9.464, "grad_norm": 48.99953842163086, "learning_rate": 4.7040000000000004e-05, "loss": 1.6825, "step": 1183 }, { "epoch": 9.472, "grad_norm": 30.89815902709961, "learning_rate": 4.708e-05, 
"loss": 1.7716, "step": 1184 }, { "epoch": 9.48, "grad_norm": 123.16376495361328, "learning_rate": 4.712e-05, "loss": 2.0349, "step": 1185 }, { "epoch": 9.488, "grad_norm": 25.76966094970703, "learning_rate": 4.716e-05, "loss": 1.7442, "step": 1186 }, { "epoch": 9.496, "grad_norm": 32.96416091918945, "learning_rate": 4.72e-05, "loss": 2.064, "step": 1187 }, { "epoch": 9.504, "grad_norm": 43.278507232666016, "learning_rate": 4.724e-05, "loss": 1.8982, "step": 1188 }, { "epoch": 9.512, "grad_norm": 43.76283264160156, "learning_rate": 4.728e-05, "loss": 1.52, "step": 1189 }, { "epoch": 9.52, "grad_norm": 50.7393684387207, "learning_rate": 4.732e-05, "loss": 1.424, "step": 1190 }, { "epoch": 9.528, "grad_norm": 62.63689041137695, "learning_rate": 4.736000000000001e-05, "loss": 1.4215, "step": 1191 }, { "epoch": 9.536, "grad_norm": 61.96453857421875, "learning_rate": 4.74e-05, "loss": 2.1654, "step": 1192 }, { "epoch": 9.544, "grad_norm": 55.63090133666992, "learning_rate": 4.744e-05, "loss": 1.6142, "step": 1193 }, { "epoch": 9.552, "grad_norm": 33.627010345458984, "learning_rate": 4.748e-05, "loss": 1.5248, "step": 1194 }, { "epoch": 9.56, "grad_norm": 36.73600769042969, "learning_rate": 4.7520000000000006e-05, "loss": 1.6542, "step": 1195 }, { "epoch": 9.568, "grad_norm": 32.811302185058594, "learning_rate": 4.7560000000000005e-05, "loss": 1.6316, "step": 1196 }, { "epoch": 9.576, "grad_norm": 43.323402404785156, "learning_rate": 4.76e-05, "loss": 1.7567, "step": 1197 }, { "epoch": 9.584, "grad_norm": 37.21403121948242, "learning_rate": 4.7640000000000005e-05, "loss": 1.5059, "step": 1198 }, { "epoch": 9.592, "grad_norm": 30.278976440429688, "learning_rate": 4.7680000000000004e-05, "loss": 1.5762, "step": 1199 }, { "epoch": 9.6, "grad_norm": 99.72752380371094, "learning_rate": 4.7720000000000004e-05, "loss": 1.717, "step": 1200 }, { "epoch": 9.608, "grad_norm": 33.92728042602539, "learning_rate": 4.7760000000000004e-05, "loss": 2.1494, "step": 1201 }, { "epoch": 
9.616, "grad_norm": 105.83539581298828, "learning_rate": 4.78e-05, "loss": 2.2551, "step": 1202 }, { "epoch": 9.624, "grad_norm": 99.39824676513672, "learning_rate": 4.784e-05, "loss": 3.798, "step": 1203 }, { "epoch": 9.632, "grad_norm": 971.8486328125, "learning_rate": 4.788e-05, "loss": 2.3168, "step": 1204 }, { "epoch": 9.64, "grad_norm": 28.63852882385254, "learning_rate": 4.792e-05, "loss": 2.1529, "step": 1205 }, { "epoch": 9.648, "grad_norm": 33.97957992553711, "learning_rate": 4.796e-05, "loss": 1.827, "step": 1206 }, { "epoch": 9.656, "grad_norm": 40.96877670288086, "learning_rate": 4.8e-05, "loss": 1.8614, "step": 1207 }, { "epoch": 9.664, "grad_norm": 51.83784484863281, "learning_rate": 4.804e-05, "loss": 1.5223, "step": 1208 }, { "epoch": 9.672, "grad_norm": 30.291786193847656, "learning_rate": 4.808e-05, "loss": 1.7812, "step": 1209 }, { "epoch": 9.68, "grad_norm": 36.49685287475586, "learning_rate": 4.812000000000001e-05, "loss": 1.9995, "step": 1210 }, { "epoch": 9.688, "grad_norm": 168.15597534179688, "learning_rate": 4.816e-05, "loss": 1.8174, "step": 1211 }, { "epoch": 9.696, "grad_norm": 44.41680908203125, "learning_rate": 4.82e-05, "loss": 2.0118, "step": 1212 }, { "epoch": 9.704, "grad_norm": 39.143611907958984, "learning_rate": 4.824e-05, "loss": 1.9905, "step": 1213 }, { "epoch": 9.712, "grad_norm": 32.97753143310547, "learning_rate": 4.8280000000000005e-05, "loss": 1.7686, "step": 1214 }, { "epoch": 9.72, "grad_norm": 74.6324691772461, "learning_rate": 4.8320000000000005e-05, "loss": 2.6676, "step": 1215 }, { "epoch": 9.728, "grad_norm": 56.96977996826172, "learning_rate": 4.836e-05, "loss": 1.8706, "step": 1216 }, { "epoch": 9.736, "grad_norm": 37.03286361694336, "learning_rate": 4.8400000000000004e-05, "loss": 1.5676, "step": 1217 }, { "epoch": 9.744, "grad_norm": 25.921586990356445, "learning_rate": 4.8440000000000004e-05, "loss": 1.7898, "step": 1218 }, { "epoch": 9.752, "grad_norm": 44.934749603271484, "learning_rate": 
4.8480000000000003e-05, "loss": 1.9429, "step": 1219 }, { "epoch": 9.76, "grad_norm": 28.46799087524414, "learning_rate": 4.852e-05, "loss": 1.7302, "step": 1220 }, { "epoch": 9.768, "grad_norm": 25.506818771362305, "learning_rate": 4.856e-05, "loss": 1.567, "step": 1221 }, { "epoch": 9.776, "grad_norm": 64.08396911621094, "learning_rate": 4.86e-05, "loss": 1.7296, "step": 1222 }, { "epoch": 9.784, "grad_norm": 25.726673126220703, "learning_rate": 4.864e-05, "loss": 1.7214, "step": 1223 }, { "epoch": 9.792, "grad_norm": 58.66437530517578, "learning_rate": 4.868e-05, "loss": 1.5514, "step": 1224 }, { "epoch": 9.8, "grad_norm": 40.895660400390625, "learning_rate": 4.872000000000001e-05, "loss": 2.0472, "step": 1225 }, { "epoch": 9.808, "grad_norm": 93.60547637939453, "learning_rate": 4.876e-05, "loss": 1.6299, "step": 1226 }, { "epoch": 9.816, "grad_norm": 69.75370788574219, "learning_rate": 4.88e-05, "loss": 2.0795, "step": 1227 }, { "epoch": 9.824, "grad_norm": 18.24615478515625, "learning_rate": 4.884e-05, "loss": 1.656, "step": 1228 }, { "epoch": 9.832, "grad_norm": 107.6475601196289, "learning_rate": 4.8880000000000006e-05, "loss": 1.8648, "step": 1229 }, { "epoch": 9.84, "grad_norm": 40.11996078491211, "learning_rate": 4.8920000000000006e-05, "loss": 2.2606, "step": 1230 }, { "epoch": 9.848, "grad_norm": 42.08393096923828, "learning_rate": 4.896e-05, "loss": 1.3131, "step": 1231 }, { "epoch": 9.856, "grad_norm": 32.637264251708984, "learning_rate": 4.9e-05, "loss": 1.791, "step": 1232 }, { "epoch": 9.864, "grad_norm": 60.966529846191406, "learning_rate": 4.9040000000000005e-05, "loss": 1.5967, "step": 1233 }, { "epoch": 9.872, "grad_norm": 77.09776306152344, "learning_rate": 4.9080000000000004e-05, "loss": 3.4938, "step": 1234 }, { "epoch": 9.88, "grad_norm": 55.894683837890625, "learning_rate": 4.9120000000000004e-05, "loss": 1.4639, "step": 1235 }, { "epoch": 9.888, "grad_norm": 71.3500747680664, "learning_rate": 4.9160000000000004e-05, "loss": 1.6616, 
"step": 1236 }, { "epoch": 9.896, "grad_norm": 29.699565887451172, "learning_rate": 4.92e-05, "loss": 1.5202, "step": 1237 }, { "epoch": 9.904, "grad_norm": 59.33797073364258, "learning_rate": 4.924e-05, "loss": 1.3983, "step": 1238 }, { "epoch": 9.912, "grad_norm": 42.436405181884766, "learning_rate": 4.928e-05, "loss": 1.6564, "step": 1239 }, { "epoch": 9.92, "grad_norm": 39.656314849853516, "learning_rate": 4.932e-05, "loss": 1.6722, "step": 1240 }, { "epoch": 9.928, "grad_norm": 294.8619079589844, "learning_rate": 4.936e-05, "loss": 2.4256, "step": 1241 }, { "epoch": 9.936, "grad_norm": 25.31249237060547, "learning_rate": 4.94e-05, "loss": 2.177, "step": 1242 }, { "epoch": 9.943999999999999, "grad_norm": 40.74161911010742, "learning_rate": 4.944e-05, "loss": 1.9897, "step": 1243 }, { "epoch": 9.952, "grad_norm": 95.97357177734375, "learning_rate": 4.948000000000001e-05, "loss": 1.7449, "step": 1244 }, { "epoch": 9.96, "grad_norm": 38.70718002319336, "learning_rate": 4.952e-05, "loss": 1.5887, "step": 1245 }, { "epoch": 9.968, "grad_norm": 40.28546142578125, "learning_rate": 4.956e-05, "loss": 2.0006, "step": 1246 }, { "epoch": 9.975999999999999, "grad_norm": 101.55790710449219, "learning_rate": 4.96e-05, "loss": 2.0212, "step": 1247 }, { "epoch": 9.984, "grad_norm": 45.089515686035156, "learning_rate": 4.9640000000000006e-05, "loss": 2.3107, "step": 1248 }, { "epoch": 9.992, "grad_norm": 92.20901489257812, "learning_rate": 4.9680000000000005e-05, "loss": 1.6645, "step": 1249 }, { "epoch": 10.0, "grad_norm": 330.3880310058594, "learning_rate": 4.972e-05, "loss": 1.7824, "step": 1250 }, { "epoch": 10.0, "eval_loss": 2.001730442047119, "eval_map": 0.1161, "eval_map_50": 0.2508, "eval_map_75": 0.0892, "eval_map_Coverall": 0.3143, "eval_map_Face_Shield": 0.0356, "eval_map_Gloves": 0.0831, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.1472, "eval_map_large": 0.1271, "eval_map_medium": 0.0586, "eval_map_small": 0.0496, "eval_mar_1": 0.1052, "eval_mar_10": 0.212, 
"eval_mar_100": 0.2227, "eval_mar_100_Coverall": 0.4978, "eval_mar_100_Face_Shield": 0.0353, "eval_mar_100_Gloves": 0.2246, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3558, "eval_mar_large": 0.2352, "eval_mar_medium": 0.13, "eval_mar_small": 0.1098, "eval_runtime": 0.9154, "eval_samples_per_second": 31.681, "eval_steps_per_second": 2.185, "step": 1250 }, { "epoch": 10.008, "grad_norm": 78.28111267089844, "learning_rate": 4.976e-05, "loss": 1.3574, "step": 1251 }, { "epoch": 10.016, "grad_norm": 25.253299713134766, "learning_rate": 4.9800000000000004e-05, "loss": 1.8684, "step": 1252 }, { "epoch": 10.024, "grad_norm": 1969.915771484375, "learning_rate": 4.9840000000000004e-05, "loss": 2.0246, "step": 1253 }, { "epoch": 10.032, "grad_norm": 23.511550903320312, "learning_rate": 4.9880000000000004e-05, "loss": 1.9402, "step": 1254 }, { "epoch": 10.04, "grad_norm": 124.15755462646484, "learning_rate": 4.992e-05, "loss": 2.2255, "step": 1255 }, { "epoch": 10.048, "grad_norm": 74.7978744506836, "learning_rate": 4.996e-05, "loss": 1.8637, "step": 1256 }, { "epoch": 10.056, "grad_norm": 76.99971008300781, "learning_rate": 5e-05, "loss": 1.849, "step": 1257 }, { "epoch": 10.064, "grad_norm": 106.57133483886719, "learning_rate": 4.999555555555556e-05, "loss": 3.1823, "step": 1258 }, { "epoch": 10.072, "grad_norm": 41.50003433227539, "learning_rate": 4.999111111111111e-05, "loss": 3.6946, "step": 1259 }, { "epoch": 10.08, "grad_norm": 65.02320098876953, "learning_rate": 4.9986666666666674e-05, "loss": 2.5618, "step": 1260 }, { "epoch": 10.088, "grad_norm": 91.68412017822266, "learning_rate": 4.998222222222222e-05, "loss": 2.7887, "step": 1261 }, { "epoch": 10.096, "grad_norm": 381.4338073730469, "learning_rate": 4.997777777777778e-05, "loss": 2.3782, "step": 1262 }, { "epoch": 10.104, "grad_norm": 401.7804260253906, "learning_rate": 4.997333333333333e-05, "loss": 2.6666, "step": 1263 }, { "epoch": 10.112, "grad_norm": 39.280433654785156, "learning_rate": 
4.996888888888889e-05, "loss": 2.3192, "step": 1264 }, { "epoch": 10.12, "grad_norm": 45.4619255065918, "learning_rate": 4.996444444444445e-05, "loss": 1.9902, "step": 1265 }, { "epoch": 10.128, "grad_norm": 112.6328353881836, "learning_rate": 4.996e-05, "loss": 2.8948, "step": 1266 }, { "epoch": 10.136, "grad_norm": 37.33510971069336, "learning_rate": 4.995555555555556e-05, "loss": 2.4929, "step": 1267 }, { "epoch": 10.144, "grad_norm": 55.2027702331543, "learning_rate": 4.995111111111111e-05, "loss": 2.4442, "step": 1268 }, { "epoch": 10.152, "grad_norm": 489.08343505859375, "learning_rate": 4.994666666666667e-05, "loss": 2.8272, "step": 1269 }, { "epoch": 10.16, "grad_norm": 48.24622344970703, "learning_rate": 4.994222222222222e-05, "loss": 1.5461, "step": 1270 }, { "epoch": 10.168, "grad_norm": 35.82722091674805, "learning_rate": 4.993777777777778e-05, "loss": 1.7969, "step": 1271 }, { "epoch": 10.176, "grad_norm": 32.82820129394531, "learning_rate": 4.993333333333334e-05, "loss": 2.1466, "step": 1272 }, { "epoch": 10.184, "grad_norm": 97.893310546875, "learning_rate": 4.9928888888888893e-05, "loss": 1.9113, "step": 1273 }, { "epoch": 10.192, "grad_norm": 822.072265625, "learning_rate": 4.992444444444445e-05, "loss": 1.9299, "step": 1274 }, { "epoch": 10.2, "grad_norm": 63.5489501953125, "learning_rate": 4.992e-05, "loss": 2.3931, "step": 1275 }, { "epoch": 10.208, "grad_norm": 34.855106353759766, "learning_rate": 4.991555555555556e-05, "loss": 1.5744, "step": 1276 }, { "epoch": 10.216, "grad_norm": 59.4627685546875, "learning_rate": 4.991111111111111e-05, "loss": 3.1571, "step": 1277 }, { "epoch": 10.224, "grad_norm": 68.3545150756836, "learning_rate": 4.990666666666667e-05, "loss": 1.6696, "step": 1278 }, { "epoch": 10.232, "grad_norm": 2829.093505859375, "learning_rate": 4.990222222222222e-05, "loss": 1.6875, "step": 1279 }, { "epoch": 10.24, "grad_norm": 86.78104400634766, "learning_rate": 4.9897777777777784e-05, "loss": 1.5805, "step": 1280 }, { "epoch": 
10.248, "grad_norm": 52.14318084716797, "learning_rate": 4.989333333333334e-05, "loss": 3.2277, "step": 1281 }, { "epoch": 10.256, "grad_norm": 36.89432144165039, "learning_rate": 4.9888888888888894e-05, "loss": 1.8845, "step": 1282 }, { "epoch": 10.264, "grad_norm": 31.134349822998047, "learning_rate": 4.988444444444444e-05, "loss": 1.7071, "step": 1283 }, { "epoch": 10.272, "grad_norm": 46.08787536621094, "learning_rate": 4.9880000000000004e-05, "loss": 1.4817, "step": 1284 }, { "epoch": 10.28, "grad_norm": 56.934654235839844, "learning_rate": 4.987555555555556e-05, "loss": 1.8972, "step": 1285 }, { "epoch": 10.288, "grad_norm": 38.423828125, "learning_rate": 4.987111111111111e-05, "loss": 2.0475, "step": 1286 }, { "epoch": 10.296, "grad_norm": 70.89649963378906, "learning_rate": 4.986666666666667e-05, "loss": 1.8882, "step": 1287 }, { "epoch": 10.304, "grad_norm": 88.7867202758789, "learning_rate": 4.986222222222223e-05, "loss": 1.8989, "step": 1288 }, { "epoch": 10.312, "grad_norm": 2405.44873046875, "learning_rate": 4.985777777777778e-05, "loss": 2.3339, "step": 1289 }, { "epoch": 10.32, "grad_norm": 41.404415130615234, "learning_rate": 4.985333333333333e-05, "loss": 1.6998, "step": 1290 }, { "epoch": 10.328, "grad_norm": 58.0932502746582, "learning_rate": 4.984888888888889e-05, "loss": 1.7174, "step": 1291 }, { "epoch": 10.336, "grad_norm": 49.759246826171875, "learning_rate": 4.984444444444445e-05, "loss": 2.7447, "step": 1292 }, { "epoch": 10.344, "grad_norm": 107.76566314697266, "learning_rate": 4.9840000000000004e-05, "loss": 2.3562, "step": 1293 }, { "epoch": 10.352, "grad_norm": 111.31034851074219, "learning_rate": 4.983555555555556e-05, "loss": 2.3092, "step": 1294 }, { "epoch": 10.36, "grad_norm": 41.12085723876953, "learning_rate": 4.9831111111111114e-05, "loss": 2.5563, "step": 1295 }, { "epoch": 10.368, "grad_norm": 32.50931167602539, "learning_rate": 4.982666666666667e-05, "loss": 2.2802, "step": 1296 }, { "epoch": 10.376, "grad_norm": 
139.36221313476562, "learning_rate": 4.982222222222222e-05, "loss": 3.0173, "step": 1297 }, { "epoch": 10.384, "grad_norm": 32.87834167480469, "learning_rate": 4.981777777777778e-05, "loss": 2.7681, "step": 1298 }, { "epoch": 10.392, "grad_norm": 94.72676086425781, "learning_rate": 4.981333333333333e-05, "loss": 2.0291, "step": 1299 }, { "epoch": 10.4, "grad_norm": 58.15901565551758, "learning_rate": 4.9808888888888895e-05, "loss": 2.0085, "step": 1300 }, { "epoch": 10.408, "grad_norm": 27.50676727294922, "learning_rate": 4.980444444444445e-05, "loss": 1.2767, "step": 1301 }, { "epoch": 10.416, "grad_norm": 321.8076171875, "learning_rate": 4.9800000000000004e-05, "loss": 2.161, "step": 1302 }, { "epoch": 10.424, "grad_norm": 31.06268310546875, "learning_rate": 4.979555555555556e-05, "loss": 2.0738, "step": 1303 }, { "epoch": 10.432, "grad_norm": 28.065303802490234, "learning_rate": 4.9791111111111114e-05, "loss": 1.6252, "step": 1304 }, { "epoch": 10.44, "grad_norm": 45.86648941040039, "learning_rate": 4.978666666666667e-05, "loss": 1.8131, "step": 1305 }, { "epoch": 10.448, "grad_norm": 33.04439163208008, "learning_rate": 4.9782222222222224e-05, "loss": 1.9028, "step": 1306 }, { "epoch": 10.456, "grad_norm": 93.91287231445312, "learning_rate": 4.977777777777778e-05, "loss": 1.9955, "step": 1307 }, { "epoch": 10.464, "grad_norm": 87.30586242675781, "learning_rate": 4.977333333333334e-05, "loss": 1.8131, "step": 1308 }, { "epoch": 10.472, "grad_norm": 229.03309631347656, "learning_rate": 4.9768888888888895e-05, "loss": 1.662, "step": 1309 }, { "epoch": 10.48, "grad_norm": 26.347660064697266, "learning_rate": 4.976444444444445e-05, "loss": 2.4351, "step": 1310 }, { "epoch": 10.488, "grad_norm": 172.1866912841797, "learning_rate": 4.976e-05, "loss": 1.5857, "step": 1311 }, { "epoch": 10.496, "grad_norm": 38.98664474487305, "learning_rate": 4.975555555555555e-05, "loss": 1.9113, "step": 1312 }, { "epoch": 10.504, "grad_norm": 112.9964599609375, "learning_rate": 
4.9751111111111114e-05, "loss": 2.0239, "step": 1313 }, { "epoch": 10.512, "grad_norm": 58.18387985229492, "learning_rate": 4.974666666666667e-05, "loss": 2.0928, "step": 1314 }, { "epoch": 10.52, "grad_norm": 106.22445678710938, "learning_rate": 4.9742222222222224e-05, "loss": 1.6, "step": 1315 }, { "epoch": 10.528, "grad_norm": 42.77871322631836, "learning_rate": 4.973777777777778e-05, "loss": 1.8091, "step": 1316 }, { "epoch": 10.536, "grad_norm": 112.50556945800781, "learning_rate": 4.973333333333334e-05, "loss": 1.7965, "step": 1317 }, { "epoch": 10.544, "grad_norm": 48.66276931762695, "learning_rate": 4.972888888888889e-05, "loss": 1.2279, "step": 1318 }, { "epoch": 10.552, "grad_norm": 41.136924743652344, "learning_rate": 4.9724444444444443e-05, "loss": 1.7361, "step": 1319 }, { "epoch": 10.56, "grad_norm": 192.51466369628906, "learning_rate": 4.972e-05, "loss": 1.9926, "step": 1320 }, { "epoch": 10.568, "grad_norm": 63.04132843017578, "learning_rate": 4.971555555555556e-05, "loss": 1.9676, "step": 1321 }, { "epoch": 10.576, "grad_norm": 46.48244094848633, "learning_rate": 4.9711111111111115e-05, "loss": 1.9366, "step": 1322 }, { "epoch": 10.584, "grad_norm": 44.46727752685547, "learning_rate": 4.970666666666667e-05, "loss": 2.1431, "step": 1323 }, { "epoch": 10.592, "grad_norm": 64.88440704345703, "learning_rate": 4.9702222222222224e-05, "loss": 2.0869, "step": 1324 }, { "epoch": 10.6, "grad_norm": 39.200199127197266, "learning_rate": 4.969777777777778e-05, "loss": 1.9101, "step": 1325 }, { "epoch": 10.608, "grad_norm": 66.43135833740234, "learning_rate": 4.9693333333333334e-05, "loss": 2.591, "step": 1326 }, { "epoch": 10.616, "grad_norm": 87.45823669433594, "learning_rate": 4.968888888888889e-05, "loss": 2.0897, "step": 1327 }, { "epoch": 10.624, "grad_norm": 70.90769958496094, "learning_rate": 4.9684444444444444e-05, "loss": 1.7422, "step": 1328 }, { "epoch": 10.632, "grad_norm": 63.676937103271484, "learning_rate": 4.9680000000000005e-05, "loss": 
1.6494, "step": 1329 }, { "epoch": 10.64, "grad_norm": 49.850914001464844, "learning_rate": 4.967555555555556e-05, "loss": 2.2528, "step": 1330 }, { "epoch": 10.648, "grad_norm": 105.69208526611328, "learning_rate": 4.9671111111111115e-05, "loss": 1.7224, "step": 1331 }, { "epoch": 10.656, "grad_norm": 127.28730773925781, "learning_rate": 4.966666666666667e-05, "loss": 2.1506, "step": 1332 }, { "epoch": 10.664, "grad_norm": 98.3475112915039, "learning_rate": 4.9662222222222225e-05, "loss": 1.9789, "step": 1333 }, { "epoch": 10.672, "grad_norm": 87.00035095214844, "learning_rate": 4.965777777777778e-05, "loss": 2.0153, "step": 1334 }, { "epoch": 10.68, "grad_norm": 94.52082061767578, "learning_rate": 4.9653333333333335e-05, "loss": 1.7833, "step": 1335 }, { "epoch": 10.688, "grad_norm": 81.25870513916016, "learning_rate": 4.964888888888889e-05, "loss": 1.6342, "step": 1336 }, { "epoch": 10.696, "grad_norm": 37.814876556396484, "learning_rate": 4.964444444444445e-05, "loss": 1.888, "step": 1337 }, { "epoch": 10.704, "grad_norm": 109.5797348022461, "learning_rate": 4.9640000000000006e-05, "loss": 1.8576, "step": 1338 }, { "epoch": 10.712, "grad_norm": 36.44534683227539, "learning_rate": 4.963555555555556e-05, "loss": 1.9138, "step": 1339 }, { "epoch": 10.72, "grad_norm": 43.675960540771484, "learning_rate": 4.963111111111111e-05, "loss": 1.5382, "step": 1340 }, { "epoch": 10.728, "grad_norm": 27.78511619567871, "learning_rate": 4.962666666666667e-05, "loss": 1.7049, "step": 1341 }, { "epoch": 10.736, "grad_norm": 75.89983367919922, "learning_rate": 4.9622222222222225e-05, "loss": 2.1986, "step": 1342 }, { "epoch": 10.744, "grad_norm": 215.8705291748047, "learning_rate": 4.961777777777778e-05, "loss": 1.4243, "step": 1343 }, { "epoch": 10.752, "grad_norm": 84.08383178710938, "learning_rate": 4.9613333333333335e-05, "loss": 2.0292, "step": 1344 }, { "epoch": 10.76, "grad_norm": 50.13309097290039, "learning_rate": 4.9608888888888897e-05, "loss": 1.9727, "step": 1345 }, { 
"epoch": 10.768, "grad_norm": 34.047733306884766, "learning_rate": 4.9604444444444445e-05, "loss": 1.9039, "step": 1346 }, { "epoch": 10.776, "grad_norm": 56.1852912902832, "learning_rate": 4.96e-05, "loss": 2.5242, "step": 1347 }, { "epoch": 10.784, "grad_norm": 42.05307388305664, "learning_rate": 4.9595555555555554e-05, "loss": 1.4874, "step": 1348 }, { "epoch": 10.792, "grad_norm": 694.892578125, "learning_rate": 4.9591111111111116e-05, "loss": 2.1047, "step": 1349 }, { "epoch": 10.8, "grad_norm": 39.125946044921875, "learning_rate": 4.958666666666667e-05, "loss": 1.7373, "step": 1350 }, { "epoch": 10.808, "grad_norm": 78.88011169433594, "learning_rate": 4.9582222222222226e-05, "loss": 1.2509, "step": 1351 }, { "epoch": 10.816, "grad_norm": 49.56678009033203, "learning_rate": 4.957777777777778e-05, "loss": 2.0853, "step": 1352 }, { "epoch": 10.824, "grad_norm": 43.0755615234375, "learning_rate": 4.9573333333333335e-05, "loss": 1.9397, "step": 1353 }, { "epoch": 10.832, "grad_norm": 39.609230041503906, "learning_rate": 4.956888888888889e-05, "loss": 2.1526, "step": 1354 }, { "epoch": 10.84, "grad_norm": 28.030235290527344, "learning_rate": 4.9564444444444445e-05, "loss": 1.6037, "step": 1355 }, { "epoch": 10.848, "grad_norm": 27.67531967163086, "learning_rate": 4.956e-05, "loss": 1.589, "step": 1356 }, { "epoch": 10.856, "grad_norm": 216.0590057373047, "learning_rate": 4.955555555555556e-05, "loss": 1.9196, "step": 1357 }, { "epoch": 10.864, "grad_norm": 42.97560119628906, "learning_rate": 4.9551111111111116e-05, "loss": 1.4781, "step": 1358 }, { "epoch": 10.872, "grad_norm": 73.04395294189453, "learning_rate": 4.954666666666667e-05, "loss": 1.6621, "step": 1359 }, { "epoch": 10.88, "grad_norm": 75.8851318359375, "learning_rate": 4.9542222222222226e-05, "loss": 1.8321, "step": 1360 }, { "epoch": 10.888, "grad_norm": 44.7178840637207, "learning_rate": 4.9537777777777774e-05, "loss": 1.2866, "step": 1361 }, { "epoch": 10.896, "grad_norm": 76.96936798095703, 
"learning_rate": 4.9533333333333336e-05, "loss": 1.3919, "step": 1362 }, { "epoch": 10.904, "grad_norm": 41.93547821044922, "learning_rate": 4.952888888888889e-05, "loss": 1.5483, "step": 1363 }, { "epoch": 10.912, "grad_norm": 209.02413940429688, "learning_rate": 4.9524444444444445e-05, "loss": 2.1312, "step": 1364 }, { "epoch": 10.92, "grad_norm": 58.62306213378906, "learning_rate": 4.952e-05, "loss": 1.5545, "step": 1365 }, { "epoch": 10.928, "grad_norm": 40.92667007446289, "learning_rate": 4.951555555555556e-05, "loss": 1.5609, "step": 1366 }, { "epoch": 10.936, "grad_norm": 55.5924072265625, "learning_rate": 4.951111111111112e-05, "loss": 1.5801, "step": 1367 }, { "epoch": 10.943999999999999, "grad_norm": 40.77595901489258, "learning_rate": 4.9506666666666665e-05, "loss": 1.8357, "step": 1368 }, { "epoch": 10.952, "grad_norm": 100.05801391601562, "learning_rate": 4.950222222222222e-05, "loss": 1.7523, "step": 1369 }, { "epoch": 10.96, "grad_norm": 49.40412902832031, "learning_rate": 4.949777777777778e-05, "loss": 1.7576, "step": 1370 }, { "epoch": 10.968, "grad_norm": 25.06734275817871, "learning_rate": 4.9493333333333336e-05, "loss": 1.3598, "step": 1371 }, { "epoch": 10.975999999999999, "grad_norm": 34.11299514770508, "learning_rate": 4.948888888888889e-05, "loss": 2.1322, "step": 1372 }, { "epoch": 10.984, "grad_norm": 94.54263305664062, "learning_rate": 4.9484444444444446e-05, "loss": 1.5673, "step": 1373 }, { "epoch": 10.992, "grad_norm": 69.59456634521484, "learning_rate": 4.948000000000001e-05, "loss": 2.09, "step": 1374 }, { "epoch": 11.0, "grad_norm": 79.49909210205078, "learning_rate": 4.9475555555555555e-05, "loss": 1.7434, "step": 1375 }, { "epoch": 11.0, "eval_loss": 1.8319048881530762, "eval_map": 0.1506, "eval_map_50": 0.3031, "eval_map_75": 0.1143, "eval_map_Coverall": 0.4587, "eval_map_Face_Shield": 0.1132, "eval_map_Gloves": 0.0813, "eval_map_Goggles": 0.0, "eval_map_Mask": 0.0997, "eval_map_large": 0.225, "eval_map_medium": 0.054, 
"eval_map_small": 0.0322, "eval_mar_1": 0.1344, "eval_mar_10": 0.2809, "eval_mar_100": 0.301, "eval_mar_100_Coverall": 0.6667, "eval_mar_100_Face_Shield": 0.2353, "eval_mar_100_Gloves": 0.2377, "eval_mar_100_Goggles": 0.0, "eval_mar_100_Mask": 0.3654, "eval_mar_large": 0.377, "eval_mar_medium": 0.1482, "eval_mar_small": 0.1159, "eval_runtime": 0.9045, "eval_samples_per_second": 32.063, "eval_steps_per_second": 2.211, "step": 1375 }, { "epoch": 11.008, "grad_norm": 39.58899688720703, "learning_rate": 4.947111111111111e-05, "loss": 1.579, "step": 1376 }, { "epoch": 11.016, "grad_norm": 30.619840621948242, "learning_rate": 4.9466666666666665e-05, "loss": 2.1613, "step": 1377 }, { "epoch": 11.024, "grad_norm": 61.93499755859375, "learning_rate": 4.946222222222223e-05, "loss": 2.1259, "step": 1378 }, { "epoch": 11.032, "grad_norm": 95.45469665527344, "learning_rate": 4.945777777777778e-05, "loss": 2.1241, "step": 1379 }, { "epoch": 11.04, "grad_norm": 35.985923767089844, "learning_rate": 4.9453333333333336e-05, "loss": 1.501, "step": 1380 }, { "epoch": 11.048, "grad_norm": 52.554771423339844, "learning_rate": 4.944888888888889e-05, "loss": 2.7081, "step": 1381 }, { "epoch": 11.056, "grad_norm": 25.027090072631836, "learning_rate": 4.9444444444444446e-05, "loss": 1.8364, "step": 1382 }, { "epoch": 11.064, "grad_norm": 38.05134201049805, "learning_rate": 4.944e-05, "loss": 1.5275, "step": 1383 }, { "epoch": 11.072, "grad_norm": 51.91404724121094, "learning_rate": 4.9435555555555556e-05, "loss": 1.5892, "step": 1384 }, { "epoch": 11.08, "grad_norm": 221.5408172607422, "learning_rate": 4.943111111111111e-05, "loss": 1.5309, "step": 1385 }, { "epoch": 11.088, "grad_norm": 42.21761703491211, "learning_rate": 4.942666666666667e-05, "loss": 1.6538, "step": 1386 }, { "epoch": 11.096, "grad_norm": 26.937990188598633, "learning_rate": 4.942222222222223e-05, "loss": 1.6963, "step": 1387 }, { "epoch": 11.104, "grad_norm": 111.13455200195312, "learning_rate": 4.941777777777778e-05, 
"loss": 1.5747, "step": 1388 }, { "epoch": 11.112, "grad_norm": 55.4564208984375, "learning_rate": 4.941333333333334e-05, "loss": 2.6146, "step": 1389 }, { "epoch": 11.12, "grad_norm": 30.19513702392578, "learning_rate": 4.940888888888889e-05, "loss": 1.4941, "step": 1390 }, { "epoch": 11.128, "grad_norm": 30.907058715820312, "learning_rate": 4.9404444444444447e-05, "loss": 1.51, "step": 1391 }, { "epoch": 11.136, "grad_norm": 102.6593246459961, "learning_rate": 4.94e-05, "loss": 2.0433, "step": 1392 }, { "epoch": 11.144, "grad_norm": 84.18702697753906, "learning_rate": 4.9395555555555556e-05, "loss": 1.9515, "step": 1393 }, { "epoch": 11.152, "grad_norm": 43.707618713378906, "learning_rate": 4.939111111111112e-05, "loss": 1.4188, "step": 1394 }, { "epoch": 11.16, "grad_norm": 40.29589080810547, "learning_rate": 4.938666666666667e-05, "loss": 2.4371, "step": 1395 }, { "epoch": 11.168, "grad_norm": 30.056060791015625, "learning_rate": 4.938222222222223e-05, "loss": 2.1149, "step": 1396 }, { "epoch": 11.176, "grad_norm": 51.43244171142578, "learning_rate": 4.9377777777777776e-05, "loss": 1.9636, "step": 1397 }, { "epoch": 11.184, "grad_norm": 33.27004623413086, "learning_rate": 4.937333333333334e-05, "loss": 1.855, "step": 1398 }, { "epoch": 11.192, "grad_norm": 48.087318420410156, "learning_rate": 4.936888888888889e-05, "loss": 2.1216, "step": 1399 }, { "epoch": 11.2, "grad_norm": 24.091690063476562, "learning_rate": 4.936444444444445e-05, "loss": 1.7416, "step": 1400 }, { "epoch": 11.208, "grad_norm": 74.27867126464844, "learning_rate": 4.936e-05, "loss": 1.6914, "step": 1401 }, { "epoch": 11.216, "grad_norm": 53.359710693359375, "learning_rate": 4.935555555555556e-05, "loss": 1.8195, "step": 1402 }, { "epoch": 11.224, "grad_norm": 227.06434631347656, "learning_rate": 4.935111111111111e-05, "loss": 1.6055, "step": 1403 }, { "epoch": 11.232, "grad_norm": 59.06343078613281, "learning_rate": 4.9346666666666666e-05, "loss": 1.1332, "step": 1404 }, { "epoch": 11.24, 
"grad_norm": 30.288311004638672, "learning_rate": 4.934222222222222e-05, "loss": 1.7713, "step": 1405 }, { "epoch": 11.248, "grad_norm": 17.43877410888672, "learning_rate": 4.933777777777778e-05, "loss": 1.5018, "step": 1406 }, { "epoch": 11.256, "grad_norm": 25.74671173095703, "learning_rate": 4.933333333333334e-05, "loss": 1.5894, "step": 1407 }, { "epoch": 11.264, "grad_norm": 45.73925018310547, "learning_rate": 4.932888888888889e-05, "loss": 1.2704, "step": 1408 }, { "epoch": 11.272, "grad_norm": 29.708595275878906, "learning_rate": 4.932444444444445e-05, "loss": 1.4472, "step": 1409 }, { "epoch": 11.28, "grad_norm": 51.76169967651367, "learning_rate": 4.932e-05, "loss": 1.9744, "step": 1410 }, { "epoch": 11.288, "grad_norm": 31.43949317932129, "learning_rate": 4.931555555555556e-05, "loss": 1.5437, "step": 1411 }, { "epoch": 11.296, "grad_norm": 42.41708755493164, "learning_rate": 4.931111111111111e-05, "loss": 1.725, "step": 1412 }, { "epoch": 11.304, "grad_norm": 52.15537643432617, "learning_rate": 4.930666666666667e-05, "loss": 1.7053, "step": 1413 }, { "epoch": 11.312, "grad_norm": 30.97047996520996, "learning_rate": 4.930222222222222e-05, "loss": 1.7963, "step": 1414 }, { "epoch": 11.32, "grad_norm": 37.964542388916016, "learning_rate": 4.929777777777778e-05, "loss": 1.7153, "step": 1415 }, { "epoch": 11.328, "grad_norm": 38.58261489868164, "learning_rate": 4.929333333333334e-05, "loss": 3.4148, "step": 1416 }, { "epoch": 11.336, "grad_norm": 40.066383361816406, "learning_rate": 4.928888888888889e-05, "loss": 1.9783, "step": 1417 }, { "epoch": 11.344, "grad_norm": 26.236865997314453, "learning_rate": 4.928444444444444e-05, "loss": 1.9394, "step": 1418 }, { "epoch": 11.352, "grad_norm": 29.447776794433594, "learning_rate": 4.928e-05, "loss": 1.4236, "step": 1419 }, { "epoch": 11.36, "grad_norm": 49.19634246826172, "learning_rate": 4.927555555555556e-05, "loss": 1.6123, "step": 1420 }, { "epoch": 11.368, "grad_norm": 29.979835510253906, "learning_rate": 
4.927111111111111e-05, "loss": 1.6588, "step": 1421 }, { "epoch": 11.376, "grad_norm": 215.86288452148438, "learning_rate": 4.926666666666667e-05, "loss": 1.5633, "step": 1422 }, { "epoch": 11.384, "grad_norm": 34.14611053466797, "learning_rate": 4.926222222222223e-05, "loss": 1.2357, "step": 1423 }, { "epoch": 11.392, "grad_norm": 50.366737365722656, "learning_rate": 4.9257777777777784e-05, "loss": 1.81, "step": 1424 }, { "epoch": 11.4, "grad_norm": 23.813400268554688, "learning_rate": 4.925333333333333e-05, "loss": 1.7701, "step": 1425 }, { "epoch": 11.408, "grad_norm": 44.81064224243164, "learning_rate": 4.9248888888888886e-05, "loss": 2.596, "step": 1426 }, { "epoch": 11.416, "grad_norm": 24.086681365966797, "learning_rate": 4.924444444444445e-05, "loss": 1.5255, "step": 1427 }, { "epoch": 11.424, "grad_norm": 23.60893440246582, "learning_rate": 4.924e-05, "loss": 1.8644, "step": 1428 }, { "epoch": 11.432, "grad_norm": 123.75206756591797, "learning_rate": 4.923555555555556e-05, "loss": 1.8424, "step": 1429 }, { "epoch": 11.44, "grad_norm": 31.49465560913086, "learning_rate": 4.923111111111111e-05, "loss": 1.7443, "step": 1430 }, { "epoch": 11.448, "grad_norm": 21.14373779296875, "learning_rate": 4.9226666666666674e-05, "loss": 1.9089, "step": 1431 }, { "epoch": 11.456, "grad_norm": 26.345718383789062, "learning_rate": 4.922222222222222e-05, "loss": 1.5816, "step": 1432 }, { "epoch": 11.464, "grad_norm": 50.744869232177734, "learning_rate": 4.921777777777778e-05, "loss": 1.634, "step": 1433 }, { "epoch": 11.472, "grad_norm": 29.55548095703125, "learning_rate": 4.921333333333333e-05, "loss": 1.4203, "step": 1434 }, { "epoch": 11.48, "grad_norm": 47.46059036254883, "learning_rate": 4.9208888888888894e-05, "loss": 2.7526, "step": 1435 }, { "epoch": 11.488, "grad_norm": 33.49186706542969, "learning_rate": 4.920444444444445e-05, "loss": 2.1168, "step": 1436 }, { "epoch": 11.496, "grad_norm": 46.8537483215332, "learning_rate": 4.92e-05, "loss": 1.8961, "step": 1437 }, 
{ "epoch": 11.504, "grad_norm": 43.26595687866211, "learning_rate": 4.919555555555556e-05, "loss": 1.7647, "step": 1438 }, { "epoch": 11.512, "grad_norm": 26.57328224182129, "learning_rate": 4.919111111111111e-05, "loss": 1.7198, "step": 1439 }, { "epoch": 11.52, "grad_norm": 33.4455451965332, "learning_rate": 4.918666666666667e-05, "loss": 1.6042, "step": 1440 }, { "epoch": 11.528, "grad_norm": 49.060638427734375, "learning_rate": 4.918222222222222e-05, "loss": 1.9838, "step": 1441 }, { "epoch": 11.536, "grad_norm": 49.884185791015625, "learning_rate": 4.917777777777778e-05, "loss": 1.6255, "step": 1442 }, { "epoch": 11.544, "grad_norm": 49.71965026855469, "learning_rate": 4.917333333333334e-05, "loss": 2.0078, "step": 1443 }, { "epoch": 11.552, "grad_norm": 36.57021713256836, "learning_rate": 4.9168888888888894e-05, "loss": 1.5719, "step": 1444 }, { "epoch": 11.56, "grad_norm": 102.64857482910156, "learning_rate": 4.916444444444445e-05, "loss": 1.8343, "step": 1445 }, { "epoch": 11.568, "grad_norm": 126.58103942871094, "learning_rate": 4.9160000000000004e-05, "loss": 1.377, "step": 1446 }, { "epoch": 11.576, "grad_norm": 21.49477195739746, "learning_rate": 4.915555555555556e-05, "loss": 1.5413, "step": 1447 }, { "epoch": 11.584, "grad_norm": 76.19985961914062, "learning_rate": 4.915111111111111e-05, "loss": 1.6273, "step": 1448 }, { "epoch": 11.592, "grad_norm": 54.036781311035156, "learning_rate": 4.914666666666667e-05, "loss": 1.2765, "step": 1449 }, { "epoch": 11.6, "grad_norm": 32.7363395690918, "learning_rate": 4.914222222222222e-05, "loss": 2.2626, "step": 1450 }, { "epoch": 11.608, "grad_norm": 42.454036712646484, "learning_rate": 4.9137777777777785e-05, "loss": 1.4638, "step": 1451 }, { "epoch": 11.616, "grad_norm": 25.707374572753906, "learning_rate": 4.913333333333334e-05, "loss": 1.4664, "step": 1452 }, { "epoch": 11.624, "grad_norm": 976.5609130859375, "learning_rate": 4.912888888888889e-05, "loss": 2.2391, "step": 1453 }, { "epoch": 11.632, 
"grad_norm": 43.58930587768555, "learning_rate": 4.912444444444444e-05, "loss": 1.7581, "step": 1454 }, { "epoch": 11.64, "grad_norm": 40.776710510253906, "learning_rate": 4.9120000000000004e-05, "loss": 2.2751, "step": 1455 }, { "epoch": 11.648, "grad_norm": 71.43414306640625, "learning_rate": 4.911555555555556e-05, "loss": 2.6286, "step": 1456 }, { "epoch": 11.656, "grad_norm": 36.619834899902344, "learning_rate": 4.9111111111111114e-05, "loss": 1.3771, "step": 1457 }, { "epoch": 11.664, "grad_norm": 38.68801498413086, "learning_rate": 4.910666666666667e-05, "loss": 2.1043, "step": 1458 }, { "epoch": 11.672, "grad_norm": 22.545604705810547, "learning_rate": 4.910222222222223e-05, "loss": 2.0947, "step": 1459 }, { "epoch": 11.68, "grad_norm": 27.805021286010742, "learning_rate": 4.909777777777778e-05, "loss": 1.9542, "step": 1460 }, { "epoch": 11.688, "grad_norm": 106.64395141601562, "learning_rate": 4.909333333333333e-05, "loss": 1.9051, "step": 1461 }, { "epoch": 11.696, "grad_norm": 44.51893615722656, "learning_rate": 4.908888888888889e-05, "loss": 1.6195, "step": 1462 }, { "epoch": 11.704, "grad_norm": 45.81283950805664, "learning_rate": 4.908444444444445e-05, "loss": 2.1417, "step": 1463 }, { "epoch": 11.712, "grad_norm": 90.15739440917969, "learning_rate": 4.9080000000000004e-05, "loss": 1.9168, "step": 1464 }, { "epoch": 11.72, "grad_norm": 45.25575637817383, "learning_rate": 4.907555555555556e-05, "loss": 1.3254, "step": 1465 }, { "epoch": 11.728, "grad_norm": 90.77632141113281, "learning_rate": 4.9071111111111114e-05, "loss": 1.501, "step": 1466 }, { "epoch": 11.736, "grad_norm": 26.196189880371094, "learning_rate": 4.906666666666667e-05, "loss": 1.6547, "step": 1467 }, { "epoch": 11.744, "grad_norm": 58.1985969543457, "learning_rate": 4.9062222222222224e-05, "loss": 1.9757, "step": 1468 }, { "epoch": 11.752, "grad_norm": 26.566675186157227, "learning_rate": 4.905777777777778e-05, "loss": 1.3212, "step": 1469 }, { "epoch": 11.76, "grad_norm": 
34.96632766723633, "learning_rate": 4.9053333333333333e-05, "loss": 1.8603, "step": 1470 }, { "epoch": 11.768, "grad_norm": 61.912147521972656, "learning_rate": 4.904888888888889e-05, "loss": 1.8752, "step": 1471 }, { "epoch": 11.776, "grad_norm": 38.96670913696289, "learning_rate": 4.904444444444445e-05, "loss": 1.8905, "step": 1472 }, { "epoch": 11.784, "grad_norm": 122.0973129272461, "learning_rate": 4.9040000000000005e-05, "loss": 1.3235, "step": 1473 }, { "epoch": 11.792, "grad_norm": 36.784934997558594, "learning_rate": 4.903555555555556e-05, "loss": 1.9393, "step": 1474 }, { "epoch": 11.8, "grad_norm": 49.699337005615234, "learning_rate": 4.903111111111111e-05, "loss": 1.7702, "step": 1475 }, { "epoch": 11.808, "grad_norm": 32.65391159057617, "learning_rate": 4.902666666666667e-05, "loss": 1.6098, "step": 1476 }, { "epoch": 11.816, "grad_norm": 37.459938049316406, "learning_rate": 4.9022222222222224e-05, "loss": 1.8174, "step": 1477 }, { "epoch": 11.824, "grad_norm": 43.18106460571289, "learning_rate": 4.901777777777778e-05, "loss": 1.7585, "step": 1478 }, { "epoch": 11.832, "grad_norm": 66.37132263183594, "learning_rate": 4.9013333333333334e-05, "loss": 1.6135, "step": 1479 }, { "epoch": 11.84, "grad_norm": 21.067529678344727, "learning_rate": 4.9008888888888896e-05, "loss": 1.2794, "step": 1480 }, { "epoch": 11.848, "grad_norm": 26.444303512573242, "learning_rate": 4.900444444444445e-05, "loss": 1.7017, "step": 1481 }, { "epoch": 11.856, "grad_norm": 30.635852813720703, "learning_rate": 4.9e-05, "loss": 1.7014, "step": 1482 }, { "epoch": 11.864, "grad_norm": 41.770050048828125, "learning_rate": 4.899555555555555e-05, "loss": 1.1778, "step": 1483 }, { "epoch": 11.872, "grad_norm": 22.772436141967773, "learning_rate": 4.8991111111111115e-05, "loss": 1.7028, "step": 1484 }, { "epoch": 11.88, "grad_norm": 1507.275634765625, "learning_rate": 4.898666666666667e-05, "loss": 1.5434, "step": 1485 }, { "epoch": 11.888, "grad_norm": 50.849571228027344, 
"learning_rate": 4.8982222222222225e-05, "loss": 2.5421, "step": 1486 }, { "epoch": 11.896, "grad_norm": 24.745784759521484, "learning_rate": 4.897777777777778e-05, "loss": 1.6025, "step": 1487 }, { "epoch": 11.904, "grad_norm": 35.86049270629883, "learning_rate": 4.897333333333334e-05, "loss": 1.605, "step": 1488 }, { "epoch": 11.912, "grad_norm": 35.71167755126953, "learning_rate": 4.896888888888889e-05, "loss": 1.5775, "step": 1489 }, { "epoch": 11.92, "grad_norm": 27.309438705444336, "learning_rate": 4.8964444444444444e-05, "loss": 1.4613, "step": 1490 }, { "epoch": 11.928, "grad_norm": 26.575973510742188, "learning_rate": 4.896e-05, "loss": 1.443, "step": 1491 }, { "epoch": 11.936, "grad_norm": 52.08866882324219, "learning_rate": 4.895555555555556e-05, "loss": 1.8936, "step": 1492 }, { "epoch": 11.943999999999999, "grad_norm": 30.300989151000977, "learning_rate": 4.8951111111111115e-05, "loss": 1.5311, "step": 1493 }, { "epoch": 11.952, "grad_norm": 40.566837310791016, "learning_rate": 4.894666666666667e-05, "loss": 1.5637, "step": 1494 }, { "epoch": 11.96, "grad_norm": 59.92058181762695, "learning_rate": 4.8942222222222225e-05, "loss": 1.702, "step": 1495 }, { "epoch": 11.968, "grad_norm": 55.48751449584961, "learning_rate": 4.893777777777778e-05, "loss": 1.6309, "step": 1496 }, { "epoch": 11.975999999999999, "grad_norm": 44.840476989746094, "learning_rate": 4.8933333333333335e-05, "loss": 1.3539, "step": 1497 }, { "epoch": 11.984, "grad_norm": 57.61119079589844, "learning_rate": 4.892888888888889e-05, "loss": 1.1828, "step": 1498 }, { "epoch": 11.992, "grad_norm": 57.932029724121094, "learning_rate": 4.8924444444444444e-05, "loss": 1.7712, "step": 1499 }, { "epoch": 12.0, "grad_norm": 51.5782356262207, "learning_rate": 4.8920000000000006e-05, "loss": 1.6098, "step": 1500 }, { "epoch": 12.0, "eval_loss": 1.7407810688018799, "eval_map": 0.1824, "eval_map_50": 0.3833, "eval_map_75": 0.1509, "eval_map_Coverall": 0.4528, "eval_map_Face_Shield": 0.1674, 
"eval_map_Gloves": 0.1034, "eval_map_Goggles": 0.024, "eval_map_Mask": 0.1645, "eval_map_large": 0.2374, "eval_map_medium": 0.0955, "eval_map_small": 0.06, "eval_mar_1": 0.1722, "eval_mar_10": 0.3246, "eval_mar_100": 0.3429, "eval_mar_100_Coverall": 0.6756, "eval_mar_100_Face_Shield": 0.3471, "eval_mar_100_Gloves": 0.2951, "eval_mar_100_Goggles": 0.0219, "eval_mar_100_Mask": 0.375, "eval_mar_large": 0.4194, "eval_mar_medium": 0.1982, "eval_mar_small": 0.1384, "eval_runtime": 0.9268, "eval_samples_per_second": 31.292, "eval_steps_per_second": 2.158, "step": 1500 }, { "epoch": 12.008, "grad_norm": 24.971240997314453, "learning_rate": 4.891555555555556e-05, "loss": 1.8617, "step": 1501 }, { "epoch": 12.016, "grad_norm": 37.09601593017578, "learning_rate": 4.8911111111111116e-05, "loss": 1.4102, "step": 1502 }, { "epoch": 12.024, "grad_norm": 47.28028869628906, "learning_rate": 4.890666666666667e-05, "loss": 1.5889, "step": 1503 }, { "epoch": 12.032, "grad_norm": 39.70038986206055, "learning_rate": 4.8902222222222225e-05, "loss": 1.6348, "step": 1504 }, { "epoch": 12.04, "grad_norm": 59.792388916015625, "learning_rate": 4.889777777777778e-05, "loss": 1.2995, "step": 1505 }, { "epoch": 12.048, "grad_norm": 236.30218505859375, "learning_rate": 4.8893333333333335e-05, "loss": 1.6795, "step": 1506 }, { "epoch": 12.056, "grad_norm": 64.6810531616211, "learning_rate": 4.888888888888889e-05, "loss": 1.8514, "step": 1507 }, { "epoch": 12.064, "grad_norm": 26.911508560180664, "learning_rate": 4.888444444444445e-05, "loss": 1.8125, "step": 1508 }, { "epoch": 12.072, "grad_norm": 22.884227752685547, "learning_rate": 4.8880000000000006e-05, "loss": 2.1599, "step": 1509 }, { "epoch": 12.08, "grad_norm": 40.08557891845703, "learning_rate": 4.8875555555555554e-05, "loss": 1.1438, "step": 1510 }, { "epoch": 12.088, "grad_norm": 21.777254104614258, "learning_rate": 4.887111111111111e-05, "loss": 1.6372, "step": 1511 }, { "epoch": 12.096, "grad_norm": 181.8191680908203, "learning_rate": 
4.886666666666667e-05, "loss": 1.6508, "step": 1512 }, { "epoch": 12.104, "grad_norm": 122.3668212890625, "learning_rate": 4.8862222222222226e-05, "loss": 2.2841, "step": 1513 }, { "epoch": 12.112, "grad_norm": 26.108232498168945, "learning_rate": 4.885777777777778e-05, "loss": 1.6091, "step": 1514 }, { "epoch": 12.12, "grad_norm": 35.94224166870117, "learning_rate": 4.8853333333333335e-05, "loss": 1.8632, "step": 1515 }, { "epoch": 12.128, "grad_norm": 70.07221221923828, "learning_rate": 4.884888888888889e-05, "loss": 1.3261, "step": 1516 }, { "epoch": 12.136, "grad_norm": 22.227746963500977, "learning_rate": 4.8844444444444445e-05, "loss": 1.4265, "step": 1517 }, { "epoch": 12.144, "grad_norm": 29.53557014465332, "learning_rate": 4.884e-05, "loss": 1.9972, "step": 1518 }, { "epoch": 12.152, "grad_norm": 20.32901954650879, "learning_rate": 4.8835555555555555e-05, "loss": 1.352, "step": 1519 }, { "epoch": 12.16, "grad_norm": 74.14122772216797, "learning_rate": 4.883111111111111e-05, "loss": 1.3913, "step": 1520 }, { "epoch": 12.168, "grad_norm": 25.00629997253418, "learning_rate": 4.882666666666667e-05, "loss": 1.5953, "step": 1521 }, { "epoch": 12.176, "grad_norm": 183.59169006347656, "learning_rate": 4.8822222222222226e-05, "loss": 1.8313, "step": 1522 }, { "epoch": 12.184, "grad_norm": 49.050636291503906, "learning_rate": 4.881777777777778e-05, "loss": 2.5877, "step": 1523 }, { "epoch": 12.192, "grad_norm": 33.395057678222656, "learning_rate": 4.8813333333333336e-05, "loss": 1.7553, "step": 1524 }, { "epoch": 12.2, "grad_norm": 33.64593505859375, "learning_rate": 4.880888888888889e-05, "loss": 1.7803, "step": 1525 }, { "epoch": 12.208, "grad_norm": 30.4632568359375, "learning_rate": 4.8804444444444445e-05, "loss": 1.1598, "step": 1526 }, { "epoch": 12.216, "grad_norm": 28.06036376953125, "learning_rate": 4.88e-05, "loss": 1.5068, "step": 1527 }, { "epoch": 12.224, "grad_norm": 84.16414642333984, "learning_rate": 4.8795555555555555e-05, "loss": 1.3805, "step": 
1528 }, { "epoch": 12.232, "grad_norm": 31.489604949951172, "learning_rate": 4.879111111111112e-05, "loss": 1.5342, "step": 1529 }, { "epoch": 12.24, "grad_norm": 50.249385833740234, "learning_rate": 4.878666666666667e-05, "loss": 1.42, "step": 1530 }, { "epoch": 12.248, "grad_norm": 27.01780128479004, "learning_rate": 4.8782222222222226e-05, "loss": 1.446, "step": 1531 }, { "epoch": 12.256, "grad_norm": 56.420433044433594, "learning_rate": 4.8777777777777775e-05, "loss": 2.1074, "step": 1532 }, { "epoch": 12.264, "grad_norm": 22.21746253967285, "learning_rate": 4.8773333333333336e-05, "loss": 1.7879, "step": 1533 }, { "epoch": 12.272, "grad_norm": 31.982666015625, "learning_rate": 4.876888888888889e-05, "loss": 1.7618, "step": 1534 }, { "epoch": 12.28, "grad_norm": 61.54862976074219, "learning_rate": 4.8764444444444446e-05, "loss": 1.6702, "step": 1535 }, { "epoch": 12.288, "grad_norm": 32.81570053100586, "learning_rate": 4.876e-05, "loss": 2.3419, "step": 1536 }, { "epoch": 12.296, "grad_norm": 39.900718688964844, "learning_rate": 4.875555555555556e-05, "loss": 1.6267, "step": 1537 }, { "epoch": 12.304, "grad_norm": 32.046852111816406, "learning_rate": 4.875111111111112e-05, "loss": 1.565, "step": 1538 }, { "epoch": 12.312, "grad_norm": 31.095491409301758, "learning_rate": 4.8746666666666665e-05, "loss": 1.7509, "step": 1539 }, { "epoch": 12.32, "grad_norm": 24.7758846282959, "learning_rate": 4.874222222222222e-05, "loss": 1.7673, "step": 1540 }, { "epoch": 12.328, "grad_norm": 36.73737335205078, "learning_rate": 4.873777777777778e-05, "loss": 1.3016, "step": 1541 }, { "epoch": 12.336, "grad_norm": 32.36088562011719, "learning_rate": 4.8733333333333337e-05, "loss": 1.6284, "step": 1542 }, { "epoch": 12.344, "grad_norm": 36.007591247558594, "learning_rate": 4.872888888888889e-05, "loss": 2.0594, "step": 1543 }, { "epoch": 12.352, "grad_norm": 50.190826416015625, "learning_rate": 4.8724444444444446e-05, "loss": 1.7418, "step": 1544 }, { "epoch": 12.36, "grad_norm": 
41.25205612182617, "learning_rate": 4.872000000000001e-05, "loss": 1.2657, "step": 1545 }, { "epoch": 12.368, "grad_norm": 36.182395935058594, "learning_rate": 4.8715555555555556e-05, "loss": 2.1008, "step": 1546 }, { "epoch": 12.376, "grad_norm": 38.072235107421875, "learning_rate": 4.871111111111111e-05, "loss": 1.9972, "step": 1547 }, { "epoch": 12.384, "grad_norm": 63.67588424682617, "learning_rate": 4.8706666666666666e-05, "loss": 1.6969, "step": 1548 }, { "epoch": 12.392, "grad_norm": 44.46890640258789, "learning_rate": 4.870222222222223e-05, "loss": 1.7676, "step": 1549 }, { "epoch": 12.4, "grad_norm": 45.06453323364258, "learning_rate": 4.869777777777778e-05, "loss": 1.2124, "step": 1550 }, { "epoch": 12.408, "grad_norm": 45.04420852661133, "learning_rate": 4.869333333333334e-05, "loss": 1.412, "step": 1551 }, { "epoch": 12.416, "grad_norm": 34.31333541870117, "learning_rate": 4.868888888888889e-05, "loss": 1.5037, "step": 1552 }, { "epoch": 12.424, "grad_norm": 37.972896575927734, "learning_rate": 4.868444444444445e-05, "loss": 1.6713, "step": 1553 }, { "epoch": 12.432, "grad_norm": 42.45273971557617, "learning_rate": 4.868e-05, "loss": 1.9083, "step": 1554 }, { "epoch": 12.44, "grad_norm": 39.260154724121094, "learning_rate": 4.8675555555555556e-05, "loss": 1.3533, "step": 1555 }, { "epoch": 12.448, "grad_norm": 56.299495697021484, "learning_rate": 4.867111111111111e-05, "loss": 1.8404, "step": 1556 }, { "epoch": 12.456, "grad_norm": 108.91908264160156, "learning_rate": 4.866666666666667e-05, "loss": 1.3747, "step": 1557 }, { "epoch": 12.464, "grad_norm": 38.48956298828125, "learning_rate": 4.866222222222223e-05, "loss": 1.6693, "step": 1558 }, { "epoch": 12.472, "grad_norm": 83.35309600830078, "learning_rate": 4.865777777777778e-05, "loss": 2.0473, "step": 1559 }, { "epoch": 12.48, "grad_norm": 67.27825164794922, "learning_rate": 4.865333333333334e-05, "loss": 1.5872, "step": 1560 }, { "epoch": 12.488, "grad_norm": 101.07713317871094, "learning_rate": 
4.864888888888889e-05, "loss": 1.5103, "step": 1561 }, { "epoch": 12.496, "grad_norm": 170.89480590820312, "learning_rate": 4.864444444444445e-05, "loss": 1.7003, "step": 1562 }, { "epoch": 12.504, "grad_norm": 44.39055252075195, "learning_rate": 4.864e-05, "loss": 1.7835, "step": 1563 }, { "epoch": 12.512, "grad_norm": 54.64291000366211, "learning_rate": 4.863555555555556e-05, "loss": 3.1206, "step": 1564 }, { "epoch": 12.52, "grad_norm": 46.735107421875, "learning_rate": 4.863111111111112e-05, "loss": 1.9144, "step": 1565 }, { "epoch": 12.528, "grad_norm": 223.17503356933594, "learning_rate": 4.862666666666667e-05, "loss": 2.1272, "step": 1566 }, { "epoch": 12.536, "grad_norm": 218.38246154785156, "learning_rate": 4.862222222222222e-05, "loss": 1.7781, "step": 1567 }, { "epoch": 12.544, "grad_norm": 74.66094970703125, "learning_rate": 4.8617777777777776e-05, "loss": 1.6883, "step": 1568 }, { "epoch": 12.552, "grad_norm": 57.34697341918945, "learning_rate": 4.861333333333333e-05, "loss": 1.7657, "step": 1569 }, { "epoch": 12.56, "grad_norm": 39.791934967041016, "learning_rate": 4.860888888888889e-05, "loss": 1.7246, "step": 1570 }, { "epoch": 12.568, "grad_norm": 24.407896041870117, "learning_rate": 4.860444444444445e-05, "loss": 2.1999, "step": 1571 }, { "epoch": 12.576, "grad_norm": 59.63742446899414, "learning_rate": 4.86e-05, "loss": 2.0699, "step": 1572 }, { "epoch": 12.584, "grad_norm": 32.49587631225586, "learning_rate": 4.859555555555556e-05, "loss": 1.7424, "step": 1573 }, { "epoch": 12.592, "grad_norm": 36.32060241699219, "learning_rate": 4.859111111111111e-05, "loss": 1.503, "step": 1574 }, { "epoch": 12.6, "grad_norm": 30.09699058532715, "learning_rate": 4.858666666666667e-05, "loss": 1.7072, "step": 1575 }, { "epoch": 12.608, "grad_norm": 23.99260711669922, "learning_rate": 4.858222222222222e-05, "loss": 1.3219, "step": 1576 }, { "epoch": 12.616, "grad_norm": 31.119144439697266, "learning_rate": 4.8577777777777776e-05, "loss": 1.7022, "step": 1577 }, 
{ "epoch": 12.624, "grad_norm": 26.13321876525879, "learning_rate": 4.857333333333334e-05, "loss": 1.0852, "step": 1578 }, { "epoch": 12.632, "grad_norm": 40.80875778198242, "learning_rate": 4.856888888888889e-05, "loss": 1.7992, "step": 1579 }, { "epoch": 12.64, "grad_norm": 32.33632278442383, "learning_rate": 4.856444444444445e-05, "loss": 1.6432, "step": 1580 }, { "epoch": 12.648, "grad_norm": 53.418670654296875, "learning_rate": 4.856e-05, "loss": 1.6302, "step": 1581 }, { "epoch": 12.656, "grad_norm": 39.667808532714844, "learning_rate": 4.855555555555556e-05, "loss": 1.5883, "step": 1582 }, { "epoch": 12.664, "grad_norm": 102.57222747802734, "learning_rate": 4.855111111111111e-05, "loss": 1.7927, "step": 1583 }, { "epoch": 12.672, "grad_norm": 25.199060440063477, "learning_rate": 4.854666666666667e-05, "loss": 2.2284, "step": 1584 }, { "epoch": 12.68, "grad_norm": 33.62412643432617, "learning_rate": 4.854222222222222e-05, "loss": 1.8624, "step": 1585 }, { "epoch": 12.688, "grad_norm": 52.486141204833984, "learning_rate": 4.8537777777777784e-05, "loss": 3.5678, "step": 1586 }, { "epoch": 12.696, "grad_norm": 33.35923385620117, "learning_rate": 4.853333333333334e-05, "loss": 1.5744, "step": 1587 }, { "epoch": 12.704, "grad_norm": 25.363170623779297, "learning_rate": 4.852888888888889e-05, "loss": 1.9338, "step": 1588 }, { "epoch": 12.712, "grad_norm": 135.64810180664062, "learning_rate": 4.852444444444444e-05, "loss": 1.7122, "step": 1589 }, { "epoch": 12.72, "grad_norm": 33.23951721191406, "learning_rate": 4.852e-05, "loss": 1.3218, "step": 1590 }, { "epoch": 12.728, "grad_norm": 113.07582092285156, "learning_rate": 4.851555555555556e-05, "loss": 2.7628, "step": 1591 }, { "epoch": 12.736, "grad_norm": 36.401309967041016, "learning_rate": 4.851111111111111e-05, "loss": 1.5402, "step": 1592 }, { "epoch": 12.744, "grad_norm": 26.78730583190918, "learning_rate": 4.850666666666667e-05, "loss": 2.0129, "step": 1593 }, { "epoch": 12.752, "grad_norm": 
31.929471969604492, "learning_rate": 4.850222222222223e-05, "loss": 1.6168, "step": 1594 }, { "epoch": 12.76, "grad_norm": 38.81142807006836, "learning_rate": 4.8497777777777784e-05, "loss": 1.6603, "step": 1595 }, { "epoch": 12.768, "grad_norm": 51.2849235534668, "learning_rate": 4.849333333333333e-05, "loss": 2.0051, "step": 1596 }, { "epoch": 12.776, "grad_norm": 33.68918228149414, "learning_rate": 4.848888888888889e-05, "loss": 1.5399, "step": 1597 }, { "epoch": 12.784, "grad_norm": 31.250856399536133, "learning_rate": 4.848444444444445e-05, "loss": 1.5985, "step": 1598 }, { "epoch": 12.792, "grad_norm": 43.54570388793945, "learning_rate": 4.8480000000000003e-05, "loss": 1.9057, "step": 1599 }, { "epoch": 12.8, "grad_norm": 118.32180786132812, "learning_rate": 4.847555555555556e-05, "loss": 1.2863, "step": 1600 }, { "epoch": 12.808, "grad_norm": 32.62568664550781, "learning_rate": 4.847111111111111e-05, "loss": 1.8942, "step": 1601 }, { "epoch": 12.816, "grad_norm": 63.04256057739258, "learning_rate": 4.8466666666666675e-05, "loss": 1.7127, "step": 1602 }, { "epoch": 12.824, "grad_norm": 29.717588424682617, "learning_rate": 4.846222222222222e-05, "loss": 1.7154, "step": 1603 }, { "epoch": 12.832, "grad_norm": 48.17650604248047, "learning_rate": 4.845777777777778e-05, "loss": 1.8659, "step": 1604 }, { "epoch": 12.84, "grad_norm": 34.274173736572266, "learning_rate": 4.845333333333333e-05, "loss": 1.4597, "step": 1605 }, { "epoch": 12.848, "grad_norm": 47.30525207519531, "learning_rate": 4.8448888888888894e-05, "loss": 1.4323, "step": 1606 }, { "epoch": 12.856, "grad_norm": 28.94491958618164, "learning_rate": 4.844444444444445e-05, "loss": 1.6608, "step": 1607 }, { "epoch": 12.864, "grad_norm": 42.66196060180664, "learning_rate": 4.8440000000000004e-05, "loss": 1.769, "step": 1608 }, { "epoch": 12.872, "grad_norm": 21.822772979736328, "learning_rate": 4.843555555555556e-05, "loss": 1.6368, "step": 1609 }, { "epoch": 12.88, "grad_norm": 33.594451904296875, 
"learning_rate": 4.8431111111111113e-05, "loss": 1.3191, "step": 1610 }, { "epoch": 12.888, "grad_norm": 65.37452697753906, "learning_rate": 4.842666666666667e-05, "loss": 1.5706, "step": 1611 }, { "epoch": 12.896, "grad_norm": 28.77519989013672, "learning_rate": 4.842222222222222e-05, "loss": 1.3942, "step": 1612 }, { "epoch": 12.904, "grad_norm": 29.54246711730957, "learning_rate": 4.841777777777778e-05, "loss": 1.4102, "step": 1613 }, { "epoch": 12.912, "grad_norm": 37.07331085205078, "learning_rate": 4.841333333333334e-05, "loss": 1.2802, "step": 1614 }, { "epoch": 12.92, "grad_norm": 36.77104568481445, "learning_rate": 4.8408888888888894e-05, "loss": 1.5447, "step": 1615 }, { "epoch": 12.928, "grad_norm": 47.48222732543945, "learning_rate": 4.840444444444445e-05, "loss": 2.4994, "step": 1616 }, { "epoch": 12.936, "grad_norm": 23.522933959960938, "learning_rate": 4.8400000000000004e-05, "loss": 1.4597, "step": 1617 }, { "epoch": 12.943999999999999, "grad_norm": 40.98870849609375, "learning_rate": 4.839555555555556e-05, "loss": 1.5898, "step": 1618 }, { "epoch": 12.952, "grad_norm": 30.841259002685547, "learning_rate": 4.8391111111111114e-05, "loss": 1.4344, "step": 1619 }, { "epoch": 12.96, "grad_norm": 60.87434005737305, "learning_rate": 4.838666666666667e-05, "loss": 1.4049, "step": 1620 }, { "epoch": 12.968, "grad_norm": 24.97718620300293, "learning_rate": 4.8382222222222224e-05, "loss": 1.4986, "step": 1621 }, { "epoch": 12.975999999999999, "grad_norm": 37.921329498291016, "learning_rate": 4.837777777777778e-05, "loss": 1.2448, "step": 1622 }, { "epoch": 12.984, "grad_norm": 25.121416091918945, "learning_rate": 4.837333333333334e-05, "loss": 1.3083, "step": 1623 }, { "epoch": 12.992, "grad_norm": 26.647796630859375, "learning_rate": 4.836888888888889e-05, "loss": 1.4976, "step": 1624 }, { "epoch": 13.0, "grad_norm": 33.63217544555664, "learning_rate": 4.836444444444444e-05, "loss": 1.5719, "step": 1625 }, { "epoch": 13.0, "eval_loss": 1.6639689207077026, 
"eval_map": 0.167, "eval_map_50": 0.3691, "eval_map_75": 0.1346, "eval_map_Coverall": 0.4009, "eval_map_Face_Shield": 0.1855, "eval_map_Gloves": 0.1237, "eval_map_Goggles": 0.0005, "eval_map_Mask": 0.1245, "eval_map_large": 0.2283, "eval_map_medium": 0.0862, "eval_map_small": 0.0787, "eval_mar_1": 0.1802, "eval_mar_10": 0.3219, "eval_mar_100": 0.3449, "eval_mar_100_Coverall": 0.66, "eval_mar_100_Face_Shield": 0.4176, "eval_mar_100_Gloves": 0.3049, "eval_mar_100_Goggles": 0.0094, "eval_mar_100_Mask": 0.3327, "eval_mar_large": 0.4166, "eval_mar_medium": 0.2049, "eval_mar_small": 0.1209, "eval_runtime": 0.9303, "eval_samples_per_second": 31.172, "eval_steps_per_second": 2.15, "step": 1625 }, { "epoch": 13.008, "grad_norm": 29.804073333740234, "learning_rate": 4.836e-05, "loss": 1.4754, "step": 1626 }, { "epoch": 13.016, "grad_norm": 32.498504638671875, "learning_rate": 4.835555555555556e-05, "loss": 1.4659, "step": 1627 }, { "epoch": 13.024, "grad_norm": 18.703142166137695, "learning_rate": 4.8351111111111114e-05, "loss": 1.2877, "step": 1628 }, { "epoch": 13.032, "grad_norm": 47.520606994628906, "learning_rate": 4.834666666666667e-05, "loss": 1.7267, "step": 1629 }, { "epoch": 13.04, "grad_norm": 45.86697769165039, "learning_rate": 4.8342222222222224e-05, "loss": 1.3886, "step": 1630 }, { "epoch": 13.048, "grad_norm": 22.81896209716797, "learning_rate": 4.833777777777778e-05, "loss": 1.2177, "step": 1631 }, { "epoch": 13.056, "grad_norm": 40.62637710571289, "learning_rate": 4.8333333333333334e-05, "loss": 1.4901, "step": 1632 }, { "epoch": 13.064, "grad_norm": 32.36813735961914, "learning_rate": 4.832888888888889e-05, "loss": 1.3758, "step": 1633 }, { "epoch": 13.072, "grad_norm": 41.495384216308594, "learning_rate": 4.832444444444444e-05, "loss": 1.7427, "step": 1634 }, { "epoch": 13.08, "grad_norm": 36.14820861816406, "learning_rate": 4.8320000000000005e-05, "loss": 1.6755, "step": 1635 }, { "epoch": 13.088, "grad_norm": 31.270263671875, "learning_rate": 
4.831555555555556e-05, "loss": 1.7975, "step": 1636 }, { "epoch": 13.096, "grad_norm": 41.49658203125, "learning_rate": 4.8311111111111115e-05, "loss": 1.5851, "step": 1637 }, { "epoch": 13.104, "grad_norm": 27.64554786682129, "learning_rate": 4.830666666666667e-05, "loss": 1.4313, "step": 1638 }, { "epoch": 13.112, "grad_norm": 62.9991340637207, "learning_rate": 4.8302222222222224e-05, "loss": 1.4807, "step": 1639 }, { "epoch": 13.12, "grad_norm": 91.41769409179688, "learning_rate": 4.829777777777778e-05, "loss": 1.692, "step": 1640 }, { "epoch": 13.128, "grad_norm": 87.60406494140625, "learning_rate": 4.8293333333333334e-05, "loss": 1.2317, "step": 1641 }, { "epoch": 13.136, "grad_norm": 236.17832946777344, "learning_rate": 4.828888888888889e-05, "loss": 1.5823, "step": 1642 }, { "epoch": 13.144, "grad_norm": 26.030546188354492, "learning_rate": 4.828444444444445e-05, "loss": 1.3066, "step": 1643 }, { "epoch": 13.152, "grad_norm": 49.27744674682617, "learning_rate": 4.8280000000000005e-05, "loss": 1.3558, "step": 1644 }, { "epoch": 13.16, "grad_norm": 69.088134765625, "learning_rate": 4.827555555555556e-05, "loss": 1.8024, "step": 1645 }, { "epoch": 13.168, "grad_norm": 43.48603439331055, "learning_rate": 4.827111111111111e-05, "loss": 1.6405, "step": 1646 }, { "epoch": 13.176, "grad_norm": 54.30669021606445, "learning_rate": 4.826666666666667e-05, "loss": 1.6785, "step": 1647 }, { "epoch": 13.184, "grad_norm": 21.219253540039062, "learning_rate": 4.8262222222222225e-05, "loss": 1.5423, "step": 1648 }, { "epoch": 13.192, "grad_norm": 49.2487907409668, "learning_rate": 4.825777777777778e-05, "loss": 1.5299, "step": 1649 }, { "epoch": 13.2, "grad_norm": 506.5839538574219, "learning_rate": 4.8253333333333334e-05, "loss": 1.9609, "step": 1650 }, { "epoch": 13.208, "grad_norm": 19.172603607177734, "learning_rate": 4.8248888888888896e-05, "loss": 2.0377, "step": 1651 }, { "epoch": 13.216, "grad_norm": 36.91635513305664, "learning_rate": 4.824444444444445e-05, "loss": 
1.3439, "step": 1652 }, { "epoch": 13.224, "grad_norm": 67.1836166381836, "learning_rate": 4.824e-05, "loss": 1.124, "step": 1653 }, { "epoch": 13.232, "grad_norm": 57.11772537231445, "learning_rate": 4.8235555555555554e-05, "loss": 1.5379, "step": 1654 }, { "epoch": 13.24, "grad_norm": 18.047574996948242, "learning_rate": 4.8231111111111115e-05, "loss": 1.2438, "step": 1655 }, { "epoch": 13.248, "grad_norm": 22.020875930786133, "learning_rate": 4.822666666666667e-05, "loss": 2.0912, "step": 1656 }, { "epoch": 13.256, "grad_norm": 26.416488647460938, "learning_rate": 4.8222222222222225e-05, "loss": 1.2757, "step": 1657 }, { "epoch": 13.264, "grad_norm": 34.20475769042969, "learning_rate": 4.821777777777778e-05, "loss": 1.3206, "step": 1658 }, { "epoch": 13.272, "grad_norm": 21.081764221191406, "learning_rate": 4.8213333333333335e-05, "loss": 1.2692, "step": 1659 }, { "epoch": 13.28, "grad_norm": 23.445354461669922, "learning_rate": 4.820888888888889e-05, "loss": 1.5172, "step": 1660 }, { "epoch": 13.288, "grad_norm": 34.43635177612305, "learning_rate": 4.8204444444444444e-05, "loss": 2.2983, "step": 1661 }, { "epoch": 13.296, "grad_norm": 18.79380226135254, "learning_rate": 4.82e-05, "loss": 1.7993, "step": 1662 }, { "epoch": 13.304, "grad_norm": 103.85465240478516, "learning_rate": 4.819555555555556e-05, "loss": 1.524, "step": 1663 }, { "epoch": 13.312, "grad_norm": 21.103172302246094, "learning_rate": 4.8191111111111116e-05, "loss": 2.0452, "step": 1664 }, { "epoch": 13.32, "grad_norm": 21.729555130004883, "learning_rate": 4.818666666666667e-05, "loss": 1.5614, "step": 1665 }, { "epoch": 13.328, "grad_norm": 30.194194793701172, "learning_rate": 4.8182222222222225e-05, "loss": 1.2968, "step": 1666 }, { "epoch": 13.336, "grad_norm": 16.55823516845703, "learning_rate": 4.817777777777778e-05, "loss": 1.8026, "step": 1667 }, { "epoch": 13.344, "grad_norm": 32.76011276245117, "learning_rate": 4.8173333333333335e-05, "loss": 1.6227, "step": 1668 }, { "epoch": 13.352, 
"grad_norm": 40.29022979736328, "learning_rate": 4.816888888888889e-05, "loss": 1.4826, "step": 1669 }, { "epoch": 13.36, "grad_norm": 23.210119247436523, "learning_rate": 4.8164444444444445e-05, "loss": 1.4112, "step": 1670 }, { "epoch": 13.368, "grad_norm": 22.139385223388672, "learning_rate": 4.816e-05, "loss": 2.2145, "step": 1671 }, { "epoch": 13.376, "grad_norm": 29.8574275970459, "learning_rate": 4.815555555555556e-05, "loss": 1.2029, "step": 1672 }, { "epoch": 13.384, "grad_norm": 47.32640075683594, "learning_rate": 4.8151111111111116e-05, "loss": 1.9108, "step": 1673 }, { "epoch": 13.392, "grad_norm": 56.22624588012695, "learning_rate": 4.814666666666667e-05, "loss": 1.7694, "step": 1674 }, { "epoch": 13.4, "grad_norm": 47.01786804199219, "learning_rate": 4.814222222222222e-05, "loss": 1.2552, "step": 1675 }, { "epoch": 13.408, "grad_norm": 55.128238677978516, "learning_rate": 4.813777777777778e-05, "loss": 2.0013, "step": 1676 }, { "epoch": 13.416, "grad_norm": 75.84793090820312, "learning_rate": 4.8133333333333336e-05, "loss": 1.5387, "step": 1677 }, { "epoch": 13.424, "grad_norm": 28.790597915649414, "learning_rate": 4.812888888888889e-05, "loss": 3.063, "step": 1678 }, { "epoch": 13.432, "grad_norm": 55.848663330078125, "learning_rate": 4.8124444444444445e-05, "loss": 1.7532, "step": 1679 }, { "epoch": 13.44, "grad_norm": 50.5234375, "learning_rate": 4.812000000000001e-05, "loss": 1.2865, "step": 1680 }, { "epoch": 13.448, "grad_norm": 35.880882263183594, "learning_rate": 4.8115555555555555e-05, "loss": 1.3254, "step": 1681 }, { "epoch": 13.456, "grad_norm": 63.15653610229492, "learning_rate": 4.811111111111111e-05, "loss": 2.3753, "step": 1682 }, { "epoch": 13.464, "grad_norm": 59.70771026611328, "learning_rate": 4.8106666666666665e-05, "loss": 1.5999, "step": 1683 }, { "epoch": 13.472, "grad_norm": 24.692949295043945, "learning_rate": 4.8102222222222226e-05, "loss": 1.4149, "step": 1684 }, { "epoch": 13.48, "grad_norm": 105.77168273925781, 
"learning_rate": 4.809777777777778e-05, "loss": 2.0951, "step": 1685 }, { "epoch": 13.488, "grad_norm": 42.0994758605957, "learning_rate": 4.8093333333333336e-05, "loss": 1.2637, "step": 1686 }, { "epoch": 13.496, "grad_norm": 30.603822708129883, "learning_rate": 4.808888888888889e-05, "loss": 1.6667, "step": 1687 }, { "epoch": 13.504, "grad_norm": 46.398651123046875, "learning_rate": 4.8084444444444446e-05, "loss": 1.2462, "step": 1688 }, { "epoch": 13.512, "grad_norm": 606.0968017578125, "learning_rate": 4.808e-05, "loss": 1.3148, "step": 1689 }, { "epoch": 13.52, "grad_norm": 38.780941009521484, "learning_rate": 4.8075555555555555e-05, "loss": 1.7389, "step": 1690 }, { "epoch": 13.528, "grad_norm": 49.47526931762695, "learning_rate": 4.807111111111111e-05, "loss": 1.6494, "step": 1691 }, { "epoch": 13.536, "grad_norm": 119.72627258300781, "learning_rate": 4.806666666666667e-05, "loss": 1.5427, "step": 1692 }, { "epoch": 13.544, "grad_norm": 43.62034606933594, "learning_rate": 4.8062222222222227e-05, "loss": 1.5333, "step": 1693 }, { "epoch": 13.552, "grad_norm": 23.878963470458984, "learning_rate": 4.805777777777778e-05, "loss": 1.7287, "step": 1694 }, { "epoch": 13.56, "grad_norm": 44.85210418701172, "learning_rate": 4.8053333333333336e-05, "loss": 1.7391, "step": 1695 }, { "epoch": 13.568, "grad_norm": 36.23694610595703, "learning_rate": 4.804888888888889e-05, "loss": 3.1597, "step": 1696 }, { "epoch": 13.576, "grad_norm": 31.055749893188477, "learning_rate": 4.8044444444444446e-05, "loss": 1.3641, "step": 1697 }, { "epoch": 13.584, "grad_norm": 44.4063835144043, "learning_rate": 4.804e-05, "loss": 1.6046, "step": 1698 }, { "epoch": 13.592, "grad_norm": 65.9734878540039, "learning_rate": 4.8035555555555556e-05, "loss": 2.0161, "step": 1699 }, { "epoch": 13.6, "grad_norm": 67.80111694335938, "learning_rate": 4.803111111111112e-05, "loss": 1.6266, "step": 1700 }, { "epoch": 13.608, "grad_norm": 25.632511138916016, "learning_rate": 4.802666666666667e-05, "loss": 
2.1857, "step": 1701 }, { "epoch": 13.616, "grad_norm": 35.321258544921875, "learning_rate": 4.802222222222223e-05, "loss": 1.7695, "step": 1702 }, { "epoch": 13.624, "grad_norm": 60.057762145996094, "learning_rate": 4.8017777777777775e-05, "loss": 2.0313, "step": 1703 }, { "epoch": 13.632, "grad_norm": 32.16096115112305, "learning_rate": 4.801333333333334e-05, "loss": 1.7219, "step": 1704 }, { "epoch": 13.64, "grad_norm": 41.75954818725586, "learning_rate": 4.800888888888889e-05, "loss": 1.7335, "step": 1705 }, { "epoch": 13.648, "grad_norm": 63.39699172973633, "learning_rate": 4.8004444444444446e-05, "loss": 1.2246, "step": 1706 }, { "epoch": 13.656, "grad_norm": 28.967958450317383, "learning_rate": 4.8e-05, "loss": 1.6663, "step": 1707 }, { "epoch": 13.664, "grad_norm": 26.42867660522461, "learning_rate": 4.799555555555556e-05, "loss": 1.0585, "step": 1708 }, { "epoch": 13.672, "grad_norm": 16.41533660888672, "learning_rate": 4.799111111111112e-05, "loss": 1.44, "step": 1709 }, { "epoch": 13.68, "grad_norm": 27.76900291442871, "learning_rate": 4.7986666666666666e-05, "loss": 1.4139, "step": 1710 }, { "epoch": 13.688, "grad_norm": 24.501296997070312, "learning_rate": 4.798222222222222e-05, "loss": 1.9565, "step": 1711 }, { "epoch": 13.696, "grad_norm": 27.313440322875977, "learning_rate": 4.797777777777778e-05, "loss": 1.4998, "step": 1712 }, { "epoch": 13.704, "grad_norm": 117.18897247314453, "learning_rate": 4.797333333333334e-05, "loss": 1.5545, "step": 1713 }, { "epoch": 13.712, "grad_norm": 51.706485748291016, "learning_rate": 4.796888888888889e-05, "loss": 1.8634, "step": 1714 }, { "epoch": 13.72, "grad_norm": 32.16521453857422, "learning_rate": 4.796444444444445e-05, "loss": 1.9311, "step": 1715 }, { "epoch": 13.728, "grad_norm": 37.353271484375, "learning_rate": 4.796e-05, "loss": 1.7668, "step": 1716 }, { "epoch": 13.736, "grad_norm": 39.058380126953125, "learning_rate": 4.7955555555555556e-05, "loss": 1.8899, "step": 1717 }, { "epoch": 13.744, 
"grad_norm": 41.247230529785156, "learning_rate": 4.795111111111111e-05, "loss": 1.5134, "step": 1718 }, { "epoch": 13.752, "grad_norm": 28.9411678314209, "learning_rate": 4.7946666666666666e-05, "loss": 1.5747, "step": 1719 }, { "epoch": 13.76, "grad_norm": 33.24557113647461, "learning_rate": 4.794222222222223e-05, "loss": 1.4368, "step": 1720 }, { "epoch": 13.768, "grad_norm": 42.041873931884766, "learning_rate": 4.793777777777778e-05, "loss": 1.836, "step": 1721 }, { "epoch": 13.776, "grad_norm": 22.31279182434082, "learning_rate": 4.793333333333334e-05, "loss": 1.5154, "step": 1722 }, { "epoch": 13.784, "grad_norm": 77.3053207397461, "learning_rate": 4.792888888888889e-05, "loss": 1.4674, "step": 1723 }, { "epoch": 13.792, "grad_norm": 32.24932861328125, "learning_rate": 4.792444444444445e-05, "loss": 1.5626, "step": 1724 }, { "epoch": 13.8, "grad_norm": 50.66033172607422, "learning_rate": 4.792e-05, "loss": 1.9483, "step": 1725 }, { "epoch": 13.808, "grad_norm": 67.49653625488281, "learning_rate": 4.791555555555556e-05, "loss": 1.493, "step": 1726 }, { "epoch": 13.816, "grad_norm": 40.4940185546875, "learning_rate": 4.791111111111111e-05, "loss": 1.4864, "step": 1727 }, { "epoch": 13.824, "grad_norm": 95.41165161132812, "learning_rate": 4.7906666666666667e-05, "loss": 1.5096, "step": 1728 }, { "epoch": 13.832, "grad_norm": 75.73592376708984, "learning_rate": 4.790222222222223e-05, "loss": 1.461, "step": 1729 }, { "epoch": 13.84, "grad_norm": 20.404207229614258, "learning_rate": 4.789777777777778e-05, "loss": 1.8209, "step": 1730 }, { "epoch": 13.848, "grad_norm": 68.56134033203125, "learning_rate": 4.789333333333334e-05, "loss": 2.0143, "step": 1731 }, { "epoch": 13.856, "grad_norm": 34.24071502685547, "learning_rate": 4.7888888888888886e-05, "loss": 1.6011, "step": 1732 }, { "epoch": 13.864, "grad_norm": 23.618690490722656, "learning_rate": 4.788444444444445e-05, "loss": 1.7973, "step": 1733 }, { "epoch": 13.872, "grad_norm": 55.74674987792969, 
"learning_rate": 4.788e-05, "loss": 1.7924, "step": 1734 }, { "epoch": 13.88, "grad_norm": 28.083627700805664, "learning_rate": 4.787555555555556e-05, "loss": 1.5277, "step": 1735 }, { "epoch": 13.888, "grad_norm": 22.935195922851562, "learning_rate": 4.787111111111111e-05, "loss": 1.7278, "step": 1736 }, { "epoch": 13.896, "grad_norm": 51.80728530883789, "learning_rate": 4.7866666666666674e-05, "loss": 2.3192, "step": 1737 }, { "epoch": 13.904, "grad_norm": 45.050174713134766, "learning_rate": 4.786222222222222e-05, "loss": 1.6677, "step": 1738 }, { "epoch": 13.912, "grad_norm": 16.62242889404297, "learning_rate": 4.7857777777777777e-05, "loss": 1.5021, "step": 1739 }, { "epoch": 13.92, "grad_norm": 97.7150650024414, "learning_rate": 4.785333333333333e-05, "loss": 1.1169, "step": 1740 }, { "epoch": 13.928, "grad_norm": 38.34442901611328, "learning_rate": 4.784888888888889e-05, "loss": 1.9958, "step": 1741 }, { "epoch": 13.936, "grad_norm": 35.1468391418457, "learning_rate": 4.784444444444445e-05, "loss": 1.2665, "step": 1742 }, { "epoch": 13.943999999999999, "grad_norm": 23.09947967529297, "learning_rate": 4.784e-05, "loss": 1.7093, "step": 1743 }, { "epoch": 13.952, "grad_norm": 147.43528747558594, "learning_rate": 4.783555555555556e-05, "loss": 2.4406, "step": 1744 }, { "epoch": 13.96, "grad_norm": 28.669841766357422, "learning_rate": 4.783111111111111e-05, "loss": 1.6769, "step": 1745 }, { "epoch": 13.968, "grad_norm": 27.910953521728516, "learning_rate": 4.782666666666667e-05, "loss": 1.411, "step": 1746 }, { "epoch": 13.975999999999999, "grad_norm": 50.44673156738281, "learning_rate": 4.782222222222222e-05, "loss": 1.4948, "step": 1747 }, { "epoch": 13.984, "grad_norm": 27.25141716003418, "learning_rate": 4.781777777777778e-05, "loss": 1.8285, "step": 1748 }, { "epoch": 13.992, "grad_norm": 33.584373474121094, "learning_rate": 4.781333333333334e-05, "loss": 1.409, "step": 1749 }, { "epoch": 14.0, "grad_norm": 28.1956729888916, "learning_rate": 
4.7808888888888893e-05, "loss": 1.5674, "step": 1750 }, { "epoch": 14.0, "eval_loss": 1.5896633863449097, "eval_map": 0.1671, "eval_map_50": 0.3783, "eval_map_75": 0.1313, "eval_map_Coverall": 0.4452, "eval_map_Face_Shield": 0.15, "eval_map_Gloves": 0.1011, "eval_map_Goggles": 0.0157, "eval_map_Mask": 0.1234, "eval_map_large": 0.2201, "eval_map_medium": 0.0872, "eval_map_small": 0.0677, "eval_mar_1": 0.1808, "eval_mar_10": 0.338, "eval_mar_100": 0.3645, "eval_mar_100_Coverall": 0.6822, "eval_mar_100_Face_Shield": 0.4824, "eval_mar_100_Gloves": 0.2656, "eval_mar_100_Goggles": 0.0656, "eval_mar_100_Mask": 0.3269, "eval_mar_large": 0.4443, "eval_mar_medium": 0.2249, "eval_mar_small": 0.0902, "eval_runtime": 1.0524, "eval_samples_per_second": 27.555, "eval_steps_per_second": 1.9, "step": 1750 }, { "epoch": 14.008, "grad_norm": 31.91731071472168, "learning_rate": 4.780444444444445e-05, "loss": 1.6225, "step": 1751 }, { "epoch": 14.016, "grad_norm": 35.05598068237305, "learning_rate": 4.78e-05, "loss": 1.5645, "step": 1752 }, { "epoch": 14.024, "grad_norm": 49.94749450683594, "learning_rate": 4.779555555555556e-05, "loss": 1.3491, "step": 1753 }, { "epoch": 14.032, "grad_norm": 21.182964324951172, "learning_rate": 4.779111111111111e-05, "loss": 1.9233, "step": 1754 }, { "epoch": 14.04, "grad_norm": 20.94676399230957, "learning_rate": 4.778666666666667e-05, "loss": 1.2099, "step": 1755 }, { "epoch": 14.048, "grad_norm": 23.19788360595703, "learning_rate": 4.778222222222222e-05, "loss": 1.7565, "step": 1756 }, { "epoch": 14.056, "grad_norm": 21.31728172302246, "learning_rate": 4.7777777777777784e-05, "loss": 1.3241, "step": 1757 }, { "epoch": 14.064, "grad_norm": 21.633813858032227, "learning_rate": 4.777333333333334e-05, "loss": 1.7491, "step": 1758 }, { "epoch": 14.072, "grad_norm": 29.48348617553711, "learning_rate": 4.7768888888888894e-05, "loss": 1.5081, "step": 1759 }, { "epoch": 14.08, "grad_norm": 51.59929275512695, "learning_rate": 4.776444444444444e-05, "loss": 
1.4423, "step": 1760 }, { "epoch": 14.088, "grad_norm": 46.229637145996094, "learning_rate": 4.7760000000000004e-05, "loss": 1.2529, "step": 1761 }, { "epoch": 14.096, "grad_norm": 34.74216842651367, "learning_rate": 4.775555555555556e-05, "loss": 1.6931, "step": 1762 }, { "epoch": 14.104, "grad_norm": 23.194704055786133, "learning_rate": 4.775111111111111e-05, "loss": 1.6488, "step": 1763 }, { "epoch": 14.112, "grad_norm": 1825.9862060546875, "learning_rate": 4.774666666666667e-05, "loss": 1.6944, "step": 1764 }, { "epoch": 14.12, "grad_norm": 28.853179931640625, "learning_rate": 4.774222222222223e-05, "loss": 1.3864, "step": 1765 }, { "epoch": 14.128, "grad_norm": 42.53569793701172, "learning_rate": 4.7737777777777785e-05, "loss": 1.9613, "step": 1766 }, { "epoch": 14.136, "grad_norm": 101.07852172851562, "learning_rate": 4.773333333333333e-05, "loss": 2.3648, "step": 1767 }, { "epoch": 14.144, "grad_norm": 23.535749435424805, "learning_rate": 4.772888888888889e-05, "loss": 1.6706, "step": 1768 }, { "epoch": 14.152, "grad_norm": 90.72325134277344, "learning_rate": 4.772444444444445e-05, "loss": 1.6106, "step": 1769 }, { "epoch": 14.16, "grad_norm": 42.552188873291016, "learning_rate": 4.7720000000000004e-05, "loss": 1.6499, "step": 1770 }, { "epoch": 14.168, "grad_norm": 36.22928237915039, "learning_rate": 4.771555555555556e-05, "loss": 1.6248, "step": 1771 }, { "epoch": 14.176, "grad_norm": 45.756710052490234, "learning_rate": 4.7711111111111114e-05, "loss": 1.4355, "step": 1772 }, { "epoch": 14.184, "grad_norm": 50.71793746948242, "learning_rate": 4.770666666666667e-05, "loss": 1.6088, "step": 1773 }, { "epoch": 14.192, "grad_norm": 32.795047760009766, "learning_rate": 4.770222222222222e-05, "loss": 1.9818, "step": 1774 }, { "epoch": 14.2, "grad_norm": 72.98542022705078, "learning_rate": 4.769777777777778e-05, "loss": 1.6378, "step": 1775 }, { "epoch": 14.208, "grad_norm": 37.340660095214844, "learning_rate": 4.769333333333333e-05, "loss": 1.4647, "step": 1776 
}, { "epoch": 14.216, "grad_norm": 30.252254486083984, "learning_rate": 4.768888888888889e-05, "loss": 1.9136, "step": 1777 }, { "epoch": 14.224, "grad_norm": 23.607656478881836, "learning_rate": 4.768444444444445e-05, "loss": 1.3297, "step": 1778 }, { "epoch": 14.232, "grad_norm": 32.125892639160156, "learning_rate": 4.7680000000000004e-05, "loss": 1.6446, "step": 1779 }, { "epoch": 14.24, "grad_norm": 25.857160568237305, "learning_rate": 4.767555555555556e-05, "loss": 1.3021, "step": 1780 }, { "epoch": 14.248, "grad_norm": 63.5899658203125, "learning_rate": 4.7671111111111114e-05, "loss": 1.4602, "step": 1781 }, { "epoch": 14.256, "grad_norm": 30.656171798706055, "learning_rate": 4.766666666666667e-05, "loss": 2.6286, "step": 1782 }, { "epoch": 14.264, "grad_norm": 25.92660903930664, "learning_rate": 4.7662222222222224e-05, "loss": 1.3214, "step": 1783 }, { "epoch": 14.272, "grad_norm": 34.290836334228516, "learning_rate": 4.765777777777778e-05, "loss": 1.977, "step": 1784 }, { "epoch": 14.28, "grad_norm": 26.93941307067871, "learning_rate": 4.765333333333333e-05, "loss": 1.2845, "step": 1785 }, { "epoch": 14.288, "grad_norm": 36.111000061035156, "learning_rate": 4.7648888888888895e-05, "loss": 1.4273, "step": 1786 }, { "epoch": 14.296, "grad_norm": 20.190574645996094, "learning_rate": 4.764444444444445e-05, "loss": 1.2629, "step": 1787 }, { "epoch": 14.304, "grad_norm": 62.64414978027344, "learning_rate": 4.7640000000000005e-05, "loss": 1.6826, "step": 1788 }, { "epoch": 14.312, "grad_norm": 35.722415924072266, "learning_rate": 4.763555555555555e-05, "loss": 1.3843, "step": 1789 }, { "epoch": 14.32, "grad_norm": 56.78360366821289, "learning_rate": 4.7631111111111114e-05, "loss": 1.9253, "step": 1790 }, { "epoch": 14.328, "grad_norm": 65.19239044189453, "learning_rate": 4.762666666666667e-05, "loss": 1.2981, "step": 1791 }, { "epoch": 14.336, "grad_norm": 112.53553771972656, "learning_rate": 4.7622222222222224e-05, "loss": 1.3624, "step": 1792 }, { "epoch": 
14.344, "grad_norm": 54.15336608886719, "learning_rate": 4.761777777777778e-05, "loss": 1.1363, "step": 1793 }, { "epoch": 14.352, "grad_norm": 49.53450393676758, "learning_rate": 4.761333333333334e-05, "loss": 1.3594, "step": 1794 }, { "epoch": 14.36, "grad_norm": 91.6377944946289, "learning_rate": 4.760888888888889e-05, "loss": 1.2182, "step": 1795 }, { "epoch": 14.368, "grad_norm": 41.71139907836914, "learning_rate": 4.7604444444444443e-05, "loss": 1.2485, "step": 1796 }, { "epoch": 14.376, "grad_norm": 44.94508743286133, "learning_rate": 4.76e-05, "loss": 1.4854, "step": 1797 }, { "epoch": 14.384, "grad_norm": 365.88323974609375, "learning_rate": 4.759555555555556e-05, "loss": 1.8264, "step": 1798 }, { "epoch": 14.392, "grad_norm": 33.7394905090332, "learning_rate": 4.7591111111111115e-05, "loss": 1.4888, "step": 1799 }, { "epoch": 14.4, "grad_norm": 31.494741439819336, "learning_rate": 4.758666666666667e-05, "loss": 1.7991, "step": 1800 }, { "epoch": 14.408, "grad_norm": 25.470157623291016, "learning_rate": 4.7582222222222224e-05, "loss": 1.6686, "step": 1801 }, { "epoch": 14.416, "grad_norm": 34.816646575927734, "learning_rate": 4.757777777777778e-05, "loss": 1.6104, "step": 1802 }, { "epoch": 14.424, "grad_norm": 48.1954460144043, "learning_rate": 4.7573333333333334e-05, "loss": 1.5846, "step": 1803 }, { "epoch": 14.432, "grad_norm": 32.92287826538086, "learning_rate": 4.756888888888889e-05, "loss": 1.5497, "step": 1804 }, { "epoch": 14.44, "grad_norm": 89.19135284423828, "learning_rate": 4.7564444444444444e-05, "loss": 1.4907, "step": 1805 }, { "epoch": 14.448, "grad_norm": 22.903982162475586, "learning_rate": 4.7560000000000005e-05, "loss": 1.668, "step": 1806 }, { "epoch": 14.456, "grad_norm": 22.908283233642578, "learning_rate": 4.755555555555556e-05, "loss": 1.8039, "step": 1807 }, { "epoch": 14.464, "grad_norm": 78.60103607177734, "learning_rate": 4.7551111111111115e-05, "loss": 1.4027, "step": 1808 }, { "epoch": 14.472, "grad_norm": 43.56050109863281, 
"learning_rate": 4.754666666666667e-05, "loss": 1.1752, "step": 1809 }, { "epoch": 14.48, "grad_norm": 42.936767578125, "learning_rate": 4.7542222222222225e-05, "loss": 1.1577, "step": 1810 }, { "epoch": 14.488, "grad_norm": 136.4442138671875, "learning_rate": 4.753777777777778e-05, "loss": 1.8992, "step": 1811 }, { "epoch": 14.496, "grad_norm": 28.74020767211914, "learning_rate": 4.7533333333333334e-05, "loss": 1.6084, "step": 1812 }, { "epoch": 14.504, "grad_norm": 122.27903747558594, "learning_rate": 4.752888888888889e-05, "loss": 1.3402, "step": 1813 }, { "epoch": 14.512, "grad_norm": 21.91157341003418, "learning_rate": 4.752444444444445e-05, "loss": 3.3947, "step": 1814 }, { "epoch": 14.52, "grad_norm": 286.6484375, "learning_rate": 4.7520000000000006e-05, "loss": 1.6022, "step": 1815 }, { "epoch": 14.528, "grad_norm": 21.46245765686035, "learning_rate": 4.751555555555556e-05, "loss": 1.5325, "step": 1816 }, { "epoch": 14.536, "grad_norm": 35.051570892333984, "learning_rate": 4.751111111111111e-05, "loss": 1.6452, "step": 1817 }, { "epoch": 14.544, "grad_norm": 36.215518951416016, "learning_rate": 4.750666666666667e-05, "loss": 1.2428, "step": 1818 }, { "epoch": 14.552, "grad_norm": 85.47265625, "learning_rate": 4.7502222222222225e-05, "loss": 1.1717, "step": 1819 }, { "epoch": 14.56, "grad_norm": 35.21894836425781, "learning_rate": 4.749777777777778e-05, "loss": 1.6228, "step": 1820 }, { "epoch": 14.568, "grad_norm": 62.80316162109375, "learning_rate": 4.7493333333333335e-05, "loss": 1.5394, "step": 1821 }, { "epoch": 14.576, "grad_norm": 37.34904098510742, "learning_rate": 4.7488888888888897e-05, "loss": 1.3267, "step": 1822 }, { "epoch": 14.584, "grad_norm": 32.40422821044922, "learning_rate": 4.748444444444445e-05, "loss": 1.7086, "step": 1823 }, { "epoch": 14.592, "grad_norm": 24.072097778320312, "learning_rate": 4.748e-05, "loss": 1.4749, "step": 1824 }, { "epoch": 14.6, "grad_norm": 54.10324478149414, "learning_rate": 4.7475555555555554e-05, "loss": 
2.0152, "step": 1825 }, { "epoch": 14.608, "grad_norm": 43.7853889465332, "learning_rate": 4.747111111111111e-05, "loss": 1.7083, "step": 1826 }, { "epoch": 14.616, "grad_norm": 29.611858367919922, "learning_rate": 4.746666666666667e-05, "loss": 1.1944, "step": 1827 }, { "epoch": 14.624, "grad_norm": 24.244632720947266, "learning_rate": 4.7462222222222226e-05, "loss": 1.5427, "step": 1828 }, { "epoch": 14.632, "grad_norm": 53.84700393676758, "learning_rate": 4.745777777777778e-05, "loss": 1.7118, "step": 1829 }, { "epoch": 14.64, "grad_norm": 46.4376220703125, "learning_rate": 4.7453333333333335e-05, "loss": 1.7429, "step": 1830 }, { "epoch": 14.648, "grad_norm": 34.95664978027344, "learning_rate": 4.744888888888889e-05, "loss": 2.0863, "step": 1831 }, { "epoch": 14.656, "grad_norm": 42.26920700073242, "learning_rate": 4.7444444444444445e-05, "loss": 1.2803, "step": 1832 }, { "epoch": 14.664, "grad_norm": 54.10430908203125, "learning_rate": 4.744e-05, "loss": 1.2758, "step": 1833 }, { "epoch": 14.672, "grad_norm": 59.63707733154297, "learning_rate": 4.7435555555555555e-05, "loss": 1.7227, "step": 1834 }, { "epoch": 14.68, "grad_norm": 39.34476852416992, "learning_rate": 4.7431111111111116e-05, "loss": 1.6254, "step": 1835 }, { "epoch": 14.688, "grad_norm": 27.00258445739746, "learning_rate": 4.742666666666667e-05, "loss": 1.769, "step": 1836 }, { "epoch": 14.696, "grad_norm": 41.38508605957031, "learning_rate": 4.7422222222222226e-05, "loss": 1.4304, "step": 1837 }, { "epoch": 14.704, "grad_norm": 31.092819213867188, "learning_rate": 4.741777777777778e-05, "loss": 1.6808, "step": 1838 }, { "epoch": 14.712, "grad_norm": 45.05617904663086, "learning_rate": 4.7413333333333336e-05, "loss": 1.8861, "step": 1839 }, { "epoch": 14.72, "grad_norm": 34.764896392822266, "learning_rate": 4.740888888888889e-05, "loss": 1.5633, "step": 1840 }, { "epoch": 14.728, "grad_norm": 149.4048309326172, "learning_rate": 4.7404444444444445e-05, "loss": 1.4932, "step": 1841 }, { "epoch": 
14.736, "grad_norm": 37.03200912475586, "learning_rate": 4.74e-05, "loss": 1.4372, "step": 1842 }, { "epoch": 14.744, "grad_norm": 28.270767211914062, "learning_rate": 4.739555555555556e-05, "loss": 1.623, "step": 1843 }, { "epoch": 14.752, "grad_norm": 31.610158920288086, "learning_rate": 4.739111111111112e-05, "loss": 1.601, "step": 1844 }, { "epoch": 14.76, "grad_norm": 33.433650970458984, "learning_rate": 4.7386666666666665e-05, "loss": 1.336, "step": 1845 }, { "epoch": 14.768, "grad_norm": 144.4530029296875, "learning_rate": 4.738222222222222e-05, "loss": 1.3774, "step": 1846 }, { "epoch": 14.776, "grad_norm": 27.89827537536621, "learning_rate": 4.737777777777778e-05, "loss": 1.8703, "step": 1847 }, { "epoch": 14.784, "grad_norm": 128.66188049316406, "learning_rate": 4.7373333333333336e-05, "loss": 1.7261, "step": 1848 }, { "epoch": 14.792, "grad_norm": 23.833282470703125, "learning_rate": 4.736888888888889e-05, "loss": 1.1747, "step": 1849 }, { "epoch": 14.8, "grad_norm": 32.72758865356445, "learning_rate": 4.7364444444444446e-05, "loss": 1.1593, "step": 1850 }, { "epoch": 14.808, "grad_norm": 45.645355224609375, "learning_rate": 4.736000000000001e-05, "loss": 1.6693, "step": 1851 }, { "epoch": 14.816, "grad_norm": 24.10607147216797, "learning_rate": 4.7355555555555555e-05, "loss": 1.6718, "step": 1852 }, { "epoch": 14.824, "grad_norm": 19.897886276245117, "learning_rate": 4.735111111111111e-05, "loss": 1.4351, "step": 1853 }, { "epoch": 14.832, "grad_norm": 25.812583923339844, "learning_rate": 4.7346666666666665e-05, "loss": 1.5033, "step": 1854 }, { "epoch": 14.84, "grad_norm": 33.21017074584961, "learning_rate": 4.734222222222223e-05, "loss": 1.4552, "step": 1855 }, { "epoch": 14.848, "grad_norm": 51.69404220581055, "learning_rate": 4.733777777777778e-05, "loss": 1.4487, "step": 1856 }, { "epoch": 14.856, "grad_norm": 30.255582809448242, "learning_rate": 4.7333333333333336e-05, "loss": 1.4467, "step": 1857 }, { "epoch": 14.864, "grad_norm": 
28.80440330505371, "learning_rate": 4.732888888888889e-05, "loss": 1.2983, "step": 1858 }, { "epoch": 14.872, "grad_norm": 36.123287200927734, "learning_rate": 4.7324444444444446e-05, "loss": 1.4952, "step": 1859 }, { "epoch": 14.88, "grad_norm": 40.97507858276367, "learning_rate": 4.732e-05, "loss": 1.3806, "step": 1860 }, { "epoch": 14.888, "grad_norm": 36.07024002075195, "learning_rate": 4.7315555555555556e-05, "loss": 1.6126, "step": 1861 }, { "epoch": 14.896, "grad_norm": 164.66209411621094, "learning_rate": 4.731111111111111e-05, "loss": 1.3213, "step": 1862 }, { "epoch": 14.904, "grad_norm": 180.08157348632812, "learning_rate": 4.730666666666667e-05, "loss": 1.33, "step": 1863 }, { "epoch": 14.912, "grad_norm": 20.04672622680664, "learning_rate": 4.730222222222223e-05, "loss": 1.3172, "step": 1864 }, { "epoch": 14.92, "grad_norm": 21.224021911621094, "learning_rate": 4.729777777777778e-05, "loss": 1.2319, "step": 1865 }, { "epoch": 14.928, "grad_norm": 27.066242218017578, "learning_rate": 4.729333333333334e-05, "loss": 1.5081, "step": 1866 }, { "epoch": 14.936, "grad_norm": 27.709230422973633, "learning_rate": 4.728888888888889e-05, "loss": 2.9079, "step": 1867 }, { "epoch": 14.943999999999999, "grad_norm": 137.754638671875, "learning_rate": 4.7284444444444446e-05, "loss": 2.0778, "step": 1868 }, { "epoch": 14.952, "grad_norm": 36.05681610107422, "learning_rate": 4.728e-05, "loss": 0.9751, "step": 1869 }, { "epoch": 14.96, "grad_norm": 14.668322563171387, "learning_rate": 4.7275555555555556e-05, "loss": 1.3349, "step": 1870 }, { "epoch": 14.968, "grad_norm": 41.67809295654297, "learning_rate": 4.727111111111112e-05, "loss": 1.524, "step": 1871 }, { "epoch": 14.975999999999999, "grad_norm": 33.471309661865234, "learning_rate": 4.726666666666667e-05, "loss": 2.6746, "step": 1872 }, { "epoch": 14.984, "grad_norm": 36.25920867919922, "learning_rate": 4.726222222222223e-05, "loss": 1.2538, "step": 1873 }, { "epoch": 14.992, "grad_norm": 38.65058517456055, 
"learning_rate": 4.7257777777777776e-05, "loss": 1.2399, "step": 1874 }, { "epoch": 15.0, "grad_norm": 37.90276336669922, "learning_rate": 4.725333333333334e-05, "loss": 1.7532, "step": 1875 }, { "epoch": 15.0, "eval_loss": 1.6843708753585815, "eval_map": 0.1728, "eval_map_50": 0.3747, "eval_map_75": 0.1236, "eval_map_Coverall": 0.4723, "eval_map_Face_Shield": 0.1677, "eval_map_Gloves": 0.0919, "eval_map_Goggles": 0.0269, "eval_map_Mask": 0.1052, "eval_map_large": 0.1959, "eval_map_medium": 0.086, "eval_map_small": 0.1166, "eval_mar_1": 0.1734, "eval_mar_10": 0.3818, "eval_mar_100": 0.3925, "eval_mar_100_Coverall": 0.6889, "eval_mar_100_Face_Shield": 0.5706, "eval_mar_100_Gloves": 0.2164, "eval_mar_100_Goggles": 0.175, "eval_mar_100_Mask": 0.3115, "eval_mar_large": 0.4617, "eval_mar_medium": 0.2628, "eval_mar_small": 0.1806, "eval_runtime": 0.9106, "eval_samples_per_second": 31.848, "eval_steps_per_second": 2.196, "step": 1875 }, { "epoch": 15.008, "grad_norm": 22.392513275146484, "learning_rate": 4.724888888888889e-05, "loss": 3.0572, "step": 1876 }, { "epoch": 15.016, "grad_norm": 35.55105209350586, "learning_rate": 4.724444444444445e-05, "loss": 1.384, "step": 1877 }, { "epoch": 15.024, "grad_norm": 27.351747512817383, "learning_rate": 4.724e-05, "loss": 1.8926, "step": 1878 }, { "epoch": 15.032, "grad_norm": 27.78337860107422, "learning_rate": 4.7235555555555557e-05, "loss": 1.3308, "step": 1879 }, { "epoch": 15.04, "grad_norm": 20.118488311767578, "learning_rate": 4.723111111111112e-05, "loss": 1.6674, "step": 1880 }, { "epoch": 15.048, "grad_norm": 31.198427200317383, "learning_rate": 4.7226666666666666e-05, "loss": 1.4209, "step": 1881 }, { "epoch": 15.056, "grad_norm": 22.99424934387207, "learning_rate": 4.722222222222222e-05, "loss": 1.5119, "step": 1882 }, { "epoch": 15.064, "grad_norm": 19.336353302001953, "learning_rate": 4.7217777777777776e-05, "loss": 1.5252, "step": 1883 }, { "epoch": 15.072, "grad_norm": 33.8846435546875, "learning_rate": 
4.721333333333334e-05, "loss": 2.2251, "step": 1884 }, { "epoch": 15.08, "grad_norm": 33.03853225708008, "learning_rate": 4.720888888888889e-05, "loss": 1.7072, "step": 1885 }, { "epoch": 15.088, "grad_norm": 37.58101272583008, "learning_rate": 4.720444444444445e-05, "loss": 1.727, "step": 1886 }, { "epoch": 15.096, "grad_norm": 40.207542419433594, "learning_rate": 4.72e-05, "loss": 1.3314, "step": 1887 }, { "epoch": 15.104, "grad_norm": 33.216651916503906, "learning_rate": 4.719555555555556e-05, "loss": 1.6305, "step": 1888 }, { "epoch": 15.112, "grad_norm": 17.63665771484375, "learning_rate": 4.719111111111111e-05, "loss": 1.5109, "step": 1889 }, { "epoch": 15.12, "grad_norm": 40.79215621948242, "learning_rate": 4.718666666666667e-05, "loss": 1.4453, "step": 1890 }, { "epoch": 15.128, "grad_norm": 32.41107177734375, "learning_rate": 4.718222222222222e-05, "loss": 1.4843, "step": 1891 }, { "epoch": 15.136, "grad_norm": 60.24119186401367, "learning_rate": 4.717777777777778e-05, "loss": 1.3127, "step": 1892 }, { "epoch": 15.144, "grad_norm": 50.37331771850586, "learning_rate": 4.717333333333334e-05, "loss": 1.5602, "step": 1893 }, { "epoch": 15.152, "grad_norm": 41.623046875, "learning_rate": 4.716888888888889e-05, "loss": 1.5519, "step": 1894 }, { "epoch": 15.16, "grad_norm": 24.452613830566406, "learning_rate": 4.716444444444445e-05, "loss": 1.2434, "step": 1895 }, { "epoch": 15.168, "grad_norm": 27.802953720092773, "learning_rate": 4.716e-05, "loss": 1.1871, "step": 1896 }, { "epoch": 15.176, "grad_norm": 28.543771743774414, "learning_rate": 4.715555555555556e-05, "loss": 2.1075, "step": 1897 }, { "epoch": 15.184, "grad_norm": 43.204654693603516, "learning_rate": 4.715111111111111e-05, "loss": 1.3399, "step": 1898 }, { "epoch": 15.192, "grad_norm": 53.7172966003418, "learning_rate": 4.714666666666667e-05, "loss": 1.563, "step": 1899 }, { "epoch": 15.2, "grad_norm": 20.111604690551758, "learning_rate": 4.714222222222223e-05, "loss": 1.1962, "step": 1900 }, { 
"epoch": 15.208, "grad_norm": 36.69060516357422, "learning_rate": 4.7137777777777783e-05, "loss": 1.6574, "step": 1901 }, { "epoch": 15.216, "grad_norm": 34.82632827758789, "learning_rate": 4.713333333333333e-05, "loss": 1.217, "step": 1902 }, { "epoch": 15.224, "grad_norm": 61.89232635498047, "learning_rate": 4.7128888888888886e-05, "loss": 1.755, "step": 1903 }, { "epoch": 15.232, "grad_norm": 19.12173080444336, "learning_rate": 4.712444444444445e-05, "loss": 1.8947, "step": 1904 }, { "epoch": 15.24, "grad_norm": 39.175636291503906, "learning_rate": 4.712e-05, "loss": 1.2942, "step": 1905 }, { "epoch": 15.248, "grad_norm": 18.774391174316406, "learning_rate": 4.711555555555556e-05, "loss": 1.454, "step": 1906 }, { "epoch": 15.256, "grad_norm": 75.38629913330078, "learning_rate": 4.711111111111111e-05, "loss": 1.663, "step": 1907 }, { "epoch": 15.264, "grad_norm": 29.201862335205078, "learning_rate": 4.7106666666666674e-05, "loss": 1.8121, "step": 1908 }, { "epoch": 15.272, "grad_norm": 46.99737548828125, "learning_rate": 4.710222222222222e-05, "loss": 1.6289, "step": 1909 }, { "epoch": 15.28, "grad_norm": 17.71269416809082, "learning_rate": 4.709777777777778e-05, "loss": 1.4241, "step": 1910 }, { "epoch": 15.288, "grad_norm": 36.3179931640625, "learning_rate": 4.709333333333333e-05, "loss": 1.614, "step": 1911 }, { "epoch": 15.296, "grad_norm": 118.02388000488281, "learning_rate": 4.7088888888888894e-05, "loss": 1.5311, "step": 1912 }, { "epoch": 15.304, "grad_norm": 51.69458770751953, "learning_rate": 4.708444444444445e-05, "loss": 1.5757, "step": 1913 }, { "epoch": 15.312, "grad_norm": 23.685802459716797, "learning_rate": 4.708e-05, "loss": 1.4804, "step": 1914 }, { "epoch": 15.32, "grad_norm": 30.548912048339844, "learning_rate": 4.707555555555556e-05, "loss": 1.5316, "step": 1915 }, { "epoch": 15.328, "grad_norm": 51.75166702270508, "learning_rate": 4.707111111111111e-05, "loss": 1.5149, "step": 1916 }, { "epoch": 15.336, "grad_norm": 25.36699104309082, 
"learning_rate": 4.706666666666667e-05, "loss": 1.7567, "step": 1917 }, { "epoch": 15.344, "grad_norm": 26.523113250732422, "learning_rate": 4.706222222222222e-05, "loss": 1.9676, "step": 1918 }, { "epoch": 15.352, "grad_norm": 50.615928649902344, "learning_rate": 4.705777777777778e-05, "loss": 1.4774, "step": 1919 }, { "epoch": 15.36, "grad_norm": 335.9318542480469, "learning_rate": 4.705333333333334e-05, "loss": 1.3315, "step": 1920 }, { "epoch": 15.368, "grad_norm": 20.209022521972656, "learning_rate": 4.7048888888888894e-05, "loss": 1.2587, "step": 1921 }, { "epoch": 15.376, "grad_norm": 28.308189392089844, "learning_rate": 4.704444444444445e-05, "loss": 1.6871, "step": 1922 }, { "epoch": 15.384, "grad_norm": 26.74424934387207, "learning_rate": 4.7040000000000004e-05, "loss": 1.21, "step": 1923 }, { "epoch": 15.392, "grad_norm": 22.612747192382812, "learning_rate": 4.703555555555556e-05, "loss": 1.5486, "step": 1924 }, { "epoch": 15.4, "grad_norm": 29.41131591796875, "learning_rate": 4.703111111111111e-05, "loss": 1.1134, "step": 1925 }, { "epoch": 15.408, "grad_norm": 26.83686637878418, "learning_rate": 4.702666666666667e-05, "loss": 1.1893, "step": 1926 }, { "epoch": 15.416, "grad_norm": 54.40666580200195, "learning_rate": 4.702222222222222e-05, "loss": 1.4489, "step": 1927 }, { "epoch": 15.424, "grad_norm": 324.913330078125, "learning_rate": 4.701777777777778e-05, "loss": 1.261, "step": 1928 }, { "epoch": 15.432, "grad_norm": 28.96255874633789, "learning_rate": 4.701333333333334e-05, "loss": 1.4013, "step": 1929 }, { "epoch": 15.44, "grad_norm": 20.939544677734375, "learning_rate": 4.7008888888888894e-05, "loss": 1.5449, "step": 1930 }, { "epoch": 15.448, "grad_norm": 21.5989933013916, "learning_rate": 4.700444444444444e-05, "loss": 1.3189, "step": 1931 }, { "epoch": 15.456, "grad_norm": 28.91246223449707, "learning_rate": 4.7e-05, "loss": 1.8265, "step": 1932 }, { "epoch": 15.464, "grad_norm": 72.63558959960938, "learning_rate": 4.699555555555556e-05, 
"loss": 1.1903, "step": 1933 }, { "epoch": 15.472, "grad_norm": 33.099586486816406, "learning_rate": 4.6991111111111114e-05, "loss": 1.5986, "step": 1934 }, { "epoch": 15.48, "grad_norm": 46.314605712890625, "learning_rate": 4.698666666666667e-05, "loss": 1.3728, "step": 1935 }, { "epoch": 15.488, "grad_norm": 22.708446502685547, "learning_rate": 4.6982222222222223e-05, "loss": 1.2746, "step": 1936 }, { "epoch": 15.496, "grad_norm": 45.169593811035156, "learning_rate": 4.6977777777777785e-05, "loss": 1.8149, "step": 1937 }, { "epoch": 15.504, "grad_norm": 27.232667922973633, "learning_rate": 4.697333333333333e-05, "loss": 1.5979, "step": 1938 }, { "epoch": 15.512, "grad_norm": 31.75835418701172, "learning_rate": 4.696888888888889e-05, "loss": 1.0204, "step": 1939 }, { "epoch": 15.52, "grad_norm": 28.48797035217285, "learning_rate": 4.696444444444444e-05, "loss": 1.7059, "step": 1940 }, { "epoch": 15.528, "grad_norm": 24.561290740966797, "learning_rate": 4.6960000000000004e-05, "loss": 1.4396, "step": 1941 }, { "epoch": 15.536, "grad_norm": 23.031536102294922, "learning_rate": 4.695555555555556e-05, "loss": 1.2079, "step": 1942 }, { "epoch": 15.544, "grad_norm": 31.859329223632812, "learning_rate": 4.6951111111111114e-05, "loss": 1.6257, "step": 1943 }, { "epoch": 15.552, "grad_norm": 46.57132339477539, "learning_rate": 4.694666666666667e-05, "loss": 1.3376, "step": 1944 }, { "epoch": 15.56, "grad_norm": 31.718767166137695, "learning_rate": 4.6942222222222224e-05, "loss": 1.902, "step": 1945 }, { "epoch": 15.568, "grad_norm": 22.263349533081055, "learning_rate": 4.693777777777778e-05, "loss": 1.671, "step": 1946 }, { "epoch": 15.576, "grad_norm": 59.69562911987305, "learning_rate": 4.6933333333333333e-05, "loss": 1.9137, "step": 1947 }, { "epoch": 15.584, "grad_norm": 81.03209686279297, "learning_rate": 4.692888888888889e-05, "loss": 1.2146, "step": 1948 }, { "epoch": 15.592, "grad_norm": 26.77716636657715, "learning_rate": 4.692444444444445e-05, "loss": 2.8127, 
"step": 1949 }, { "epoch": 15.6, "grad_norm": 33.047271728515625, "learning_rate": 4.6920000000000005e-05, "loss": 1.7043, "step": 1950 }, { "epoch": 15.608, "grad_norm": 44.62483215332031, "learning_rate": 4.691555555555556e-05, "loss": 1.4074, "step": 1951 }, { "epoch": 15.616, "grad_norm": 30.191083908081055, "learning_rate": 4.6911111111111114e-05, "loss": 1.562, "step": 1952 }, { "epoch": 15.624, "grad_norm": 25.378185272216797, "learning_rate": 4.690666666666667e-05, "loss": 2.0129, "step": 1953 }, { "epoch": 15.632, "grad_norm": 40.286460876464844, "learning_rate": 4.6902222222222224e-05, "loss": 1.7176, "step": 1954 }, { "epoch": 15.64, "grad_norm": 30.180932998657227, "learning_rate": 4.689777777777778e-05, "loss": 1.2706, "step": 1955 }, { "epoch": 15.648, "grad_norm": 33.247962951660156, "learning_rate": 4.6893333333333334e-05, "loss": 1.9138, "step": 1956 }, { "epoch": 15.656, "grad_norm": 38.55277633666992, "learning_rate": 4.6888888888888895e-05, "loss": 1.2679, "step": 1957 }, { "epoch": 15.664, "grad_norm": 31.458539962768555, "learning_rate": 4.688444444444445e-05, "loss": 1.4181, "step": 1958 }, { "epoch": 15.672, "grad_norm": 52.862754821777344, "learning_rate": 4.688e-05, "loss": 1.4201, "step": 1959 }, { "epoch": 15.68, "grad_norm": 629.9129638671875, "learning_rate": 4.687555555555555e-05, "loss": 1.7609, "step": 1960 }, { "epoch": 15.688, "grad_norm": 49.3269157409668, "learning_rate": 4.6871111111111115e-05, "loss": 1.425, "step": 1961 }, { "epoch": 15.696, "grad_norm": 18.148223876953125, "learning_rate": 4.686666666666667e-05, "loss": 1.2276, "step": 1962 }, { "epoch": 15.704, "grad_norm": 35.56953430175781, "learning_rate": 4.6862222222222225e-05, "loss": 1.1608, "step": 1963 }, { "epoch": 15.712, "grad_norm": 25.871171951293945, "learning_rate": 4.685777777777778e-05, "loss": 1.7513, "step": 1964 }, { "epoch": 15.72, "grad_norm": 31.21097183227539, "learning_rate": 4.685333333333334e-05, "loss": 1.088, "step": 1965 }, { "epoch": 15.728, 
"grad_norm": 25.071794509887695, "learning_rate": 4.684888888888889e-05, "loss": 1.8907, "step": 1966 }, { "epoch": 15.736, "grad_norm": 41.568763732910156, "learning_rate": 4.6844444444444444e-05, "loss": 1.7953, "step": 1967 }, { "epoch": 15.744, "grad_norm": 25.387615203857422, "learning_rate": 4.684e-05, "loss": 1.4018, "step": 1968 }, { "epoch": 15.752, "grad_norm": 30.678199768066406, "learning_rate": 4.683555555555556e-05, "loss": 1.7673, "step": 1969 }, { "epoch": 15.76, "grad_norm": 27.41950035095215, "learning_rate": 4.6831111111111115e-05, "loss": 2.3656, "step": 1970 }, { "epoch": 15.768, "grad_norm": 29.96198081970215, "learning_rate": 4.682666666666667e-05, "loss": 1.8175, "step": 1971 }, { "epoch": 15.776, "grad_norm": 22.92368507385254, "learning_rate": 4.6822222222222225e-05, "loss": 1.4945, "step": 1972 }, { "epoch": 15.784, "grad_norm": 50.788124084472656, "learning_rate": 4.681777777777778e-05, "loss": 1.3103, "step": 1973 }, { "epoch": 15.792, "grad_norm": 23.21949577331543, "learning_rate": 4.6813333333333335e-05, "loss": 1.8864, "step": 1974 }, { "epoch": 15.8, "grad_norm": 17.58495330810547, "learning_rate": 4.680888888888889e-05, "loss": 1.9669, "step": 1975 }, { "epoch": 15.808, "grad_norm": 28.748315811157227, "learning_rate": 4.6804444444444444e-05, "loss": 1.1742, "step": 1976 }, { "epoch": 15.816, "grad_norm": 25.433218002319336, "learning_rate": 4.6800000000000006e-05, "loss": 1.0575, "step": 1977 }, { "epoch": 15.824, "grad_norm": 21.413766860961914, "learning_rate": 4.679555555555556e-05, "loss": 1.8488, "step": 1978 }, { "epoch": 15.832, "grad_norm": 21.557626724243164, "learning_rate": 4.6791111111111116e-05, "loss": 1.842, "step": 1979 }, { "epoch": 15.84, "grad_norm": 23.614572525024414, "learning_rate": 4.678666666666667e-05, "loss": 1.5128, "step": 1980 }, { "epoch": 15.848, "grad_norm": 27.234872817993164, "learning_rate": 4.678222222222222e-05, "loss": 1.3876, "step": 1981 }, { "epoch": 15.856, "grad_norm": 
37.59291076660156, "learning_rate": 4.677777777777778e-05, "loss": 1.2315, "step": 1982 }, { "epoch": 15.864, "grad_norm": 27.262025833129883, "learning_rate": 4.6773333333333335e-05, "loss": 1.3695, "step": 1983 }, { "epoch": 15.872, "grad_norm": 26.979263305664062, "learning_rate": 4.676888888888889e-05, "loss": 1.337, "step": 1984 }, { "epoch": 15.88, "grad_norm": 27.778108596801758, "learning_rate": 4.6764444444444445e-05, "loss": 1.1276, "step": 1985 }, { "epoch": 15.888, "grad_norm": 82.16838836669922, "learning_rate": 4.6760000000000006e-05, "loss": 1.346, "step": 1986 }, { "epoch": 15.896, "grad_norm": 26.076290130615234, "learning_rate": 4.675555555555556e-05, "loss": 1.3227, "step": 1987 }, { "epoch": 15.904, "grad_norm": 16.31975555419922, "learning_rate": 4.675111111111111e-05, "loss": 1.6362, "step": 1988 }, { "epoch": 15.912, "grad_norm": 33.183326721191406, "learning_rate": 4.6746666666666664e-05, "loss": 1.4438, "step": 1989 }, { "epoch": 15.92, "grad_norm": 33.54097366333008, "learning_rate": 4.6742222222222226e-05, "loss": 1.3495, "step": 1990 }, { "epoch": 15.928, "grad_norm": 31.66785430908203, "learning_rate": 4.673777777777778e-05, "loss": 1.5047, "step": 1991 }, { "epoch": 15.936, "grad_norm": 70.57028198242188, "learning_rate": 4.6733333333333335e-05, "loss": 1.8206, "step": 1992 }, { "epoch": 15.943999999999999, "grad_norm": 34.911563873291016, "learning_rate": 4.672888888888889e-05, "loss": 1.5248, "step": 1993 }, { "epoch": 15.952, "grad_norm": 21.114452362060547, "learning_rate": 4.672444444444445e-05, "loss": 1.7706, "step": 1994 }, { "epoch": 15.96, "grad_norm": 20.389328002929688, "learning_rate": 4.672e-05, "loss": 1.1218, "step": 1995 }, { "epoch": 15.968, "grad_norm": 50.65363311767578, "learning_rate": 4.6715555555555555e-05, "loss": 1.2959, "step": 1996 }, { "epoch": 15.975999999999999, "grad_norm": 42.15362548828125, "learning_rate": 4.671111111111111e-05, "loss": 2.0721, "step": 1997 }, { "epoch": 15.984, "grad_norm": 
77.54347229003906, "learning_rate": 4.670666666666667e-05, "loss": 1.6255, "step": 1998 }, { "epoch": 15.992, "grad_norm": 89.77632904052734, "learning_rate": 4.6702222222222226e-05, "loss": 1.304, "step": 1999 }, { "epoch": 16.0, "grad_norm": 57.5623779296875, "learning_rate": 4.669777777777778e-05, "loss": 1.6178, "step": 2000 }, { "epoch": 16.0, "eval_loss": 1.6364859342575073, "eval_map": 0.1778, "eval_map_50": 0.4183, "eval_map_75": 0.1223, "eval_map_Coverall": 0.3949, "eval_map_Face_Shield": 0.2207, "eval_map_Gloves": 0.0935, "eval_map_Goggles": 0.0686, "eval_map_Mask": 0.1114, "eval_map_large": 0.2082, "eval_map_medium": 0.1142, "eval_map_small": 0.0591, "eval_mar_1": 0.1868, "eval_mar_10": 0.3385, "eval_mar_100": 0.3576, "eval_mar_100_Coverall": 0.64, "eval_mar_100_Face_Shield": 0.4471, "eval_mar_100_Gloves": 0.2689, "eval_mar_100_Goggles": 0.1781, "eval_mar_100_Mask": 0.2538, "eval_mar_large": 0.4431, "eval_mar_medium": 0.2752, "eval_mar_small": 0.0709, "eval_runtime": 0.9399, "eval_samples_per_second": 30.856, "eval_steps_per_second": 2.128, "step": 2000 }, { "epoch": 16.008, "grad_norm": 135.53964233398438, "learning_rate": 4.6693333333333336e-05, "loss": 1.367, "step": 2001 }, { "epoch": 16.016, "grad_norm": 32.371002197265625, "learning_rate": 4.668888888888889e-05, "loss": 1.0106, "step": 2002 }, { "epoch": 16.024, "grad_norm": 94.50123596191406, "learning_rate": 4.6684444444444445e-05, "loss": 1.6523, "step": 2003 }, { "epoch": 16.032, "grad_norm": 19.434402465820312, "learning_rate": 4.668e-05, "loss": 2.0113, "step": 2004 }, { "epoch": 16.04, "grad_norm": 75.76714324951172, "learning_rate": 4.6675555555555555e-05, "loss": 1.4413, "step": 2005 }, { "epoch": 16.048, "grad_norm": 22.843454360961914, "learning_rate": 4.667111111111112e-05, "loss": 1.4278, "step": 2006 }, { "epoch": 16.056, "grad_norm": 24.955345153808594, "learning_rate": 4.666666666666667e-05, "loss": 1.0956, "step": 2007 }, { "epoch": 16.064, "grad_norm": 16.620975494384766, 
"learning_rate": 4.6662222222222226e-05, "loss": 1.2549, "step": 2008 }, { "epoch": 16.072, "grad_norm": 38.34450912475586, "learning_rate": 4.665777777777778e-05, "loss": 1.2129, "step": 2009 }, { "epoch": 16.08, "grad_norm": 25.507713317871094, "learning_rate": 4.6653333333333336e-05, "loss": 1.2629, "step": 2010 }, { "epoch": 16.088, "grad_norm": 34.09609603881836, "learning_rate": 4.664888888888889e-05, "loss": 1.3911, "step": 2011 }, { "epoch": 16.096, "grad_norm": 31.861005783081055, "learning_rate": 4.6644444444444446e-05, "loss": 1.4351, "step": 2012 }, { "epoch": 16.104, "grad_norm": 23.27359962463379, "learning_rate": 4.664e-05, "loss": 1.4221, "step": 2013 }, { "epoch": 16.112, "grad_norm": 20.213289260864258, "learning_rate": 4.663555555555556e-05, "loss": 1.3844, "step": 2014 }, { "epoch": 16.12, "grad_norm": 30.560001373291016, "learning_rate": 4.663111111111112e-05, "loss": 1.5128, "step": 2015 }, { "epoch": 16.128, "grad_norm": 17.681245803833008, "learning_rate": 4.6626666666666665e-05, "loss": 1.0934, "step": 2016 }, { "epoch": 16.136, "grad_norm": 37.724609375, "learning_rate": 4.662222222222222e-05, "loss": 1.4041, "step": 2017 }, { "epoch": 16.144, "grad_norm": 32.37331008911133, "learning_rate": 4.661777777777778e-05, "loss": 1.6737, "step": 2018 }, { "epoch": 16.152, "grad_norm": 30.966938018798828, "learning_rate": 4.6613333333333337e-05, "loss": 1.4995, "step": 2019 }, { "epoch": 16.16, "grad_norm": 29.75154685974121, "learning_rate": 4.660888888888889e-05, "loss": 1.5182, "step": 2020 }, { "epoch": 16.168, "grad_norm": 32.521141052246094, "learning_rate": 4.6604444444444446e-05, "loss": 1.4988, "step": 2021 }, { "epoch": 16.176, "grad_norm": 32.64421463012695, "learning_rate": 4.660000000000001e-05, "loss": 1.2393, "step": 2022 }, { "epoch": 16.184, "grad_norm": 124.34160614013672, "learning_rate": 4.6595555555555556e-05, "loss": 1.389, "step": 2023 }, { "epoch": 16.192, "grad_norm": 27.14204978942871, "learning_rate": 
4.659111111111111e-05, "loss": 1.4589, "step": 2024 }, { "epoch": 16.2, "grad_norm": 16.2360897064209, "learning_rate": 4.6586666666666666e-05, "loss": 1.9439, "step": 2025 }, { "epoch": 16.208, "grad_norm": 19.288284301757812, "learning_rate": 4.658222222222223e-05, "loss": 2.6234, "step": 2026 }, { "epoch": 16.216, "grad_norm": 33.696998596191406, "learning_rate": 4.657777777777778e-05, "loss": 1.0825, "step": 2027 }, { "epoch": 16.224, "grad_norm": 23.33849334716797, "learning_rate": 4.657333333333334e-05, "loss": 1.429, "step": 2028 }, { "epoch": 16.232, "grad_norm": 19.36865234375, "learning_rate": 4.656888888888889e-05, "loss": 1.2479, "step": 2029 }, { "epoch": 16.24, "grad_norm": 28.482563018798828, "learning_rate": 4.6564444444444447e-05, "loss": 2.2335, "step": 2030 }, { "epoch": 16.248, "grad_norm": 20.471410751342773, "learning_rate": 4.656e-05, "loss": 1.2291, "step": 2031 }, { "epoch": 16.256, "grad_norm": 25.26593780517578, "learning_rate": 4.6555555555555556e-05, "loss": 1.8521, "step": 2032 }, { "epoch": 16.264, "grad_norm": 24.145418167114258, "learning_rate": 4.655111111111111e-05, "loss": 1.5004, "step": 2033 }, { "epoch": 16.272, "grad_norm": 21.56568145751953, "learning_rate": 4.6546666666666666e-05, "loss": 1.6339, "step": 2034 }, { "epoch": 16.28, "grad_norm": 52.85014724731445, "learning_rate": 4.654222222222223e-05, "loss": 1.1205, "step": 2035 }, { "epoch": 16.288, "grad_norm": 27.839509963989258, "learning_rate": 4.653777777777778e-05, "loss": 1.733, "step": 2036 }, { "epoch": 16.296, "grad_norm": 31.186365127563477, "learning_rate": 4.653333333333334e-05, "loss": 1.4648, "step": 2037 }, { "epoch": 16.304, "grad_norm": 22.40900230407715, "learning_rate": 4.6528888888888885e-05, "loss": 1.8435, "step": 2038 }, { "epoch": 16.312, "grad_norm": 76.47213745117188, "learning_rate": 4.652444444444445e-05, "loss": 0.9455, "step": 2039 }, { "epoch": 16.32, "grad_norm": 38.69521713256836, "learning_rate": 4.652e-05, "loss": 1.5263, "step": 2040 }, 
{ "epoch": 16.328, "grad_norm": 19.331140518188477, "learning_rate": 4.651555555555556e-05, "loss": 1.5095, "step": 2041 }, { "epoch": 16.336, "grad_norm": 21.492652893066406, "learning_rate": 4.651111111111111e-05, "loss": 1.0506, "step": 2042 }, { "epoch": 16.344, "grad_norm": 26.320417404174805, "learning_rate": 4.650666666666667e-05, "loss": 2.0966, "step": 2043 }, { "epoch": 16.352, "grad_norm": 160.09210205078125, "learning_rate": 4.650222222222223e-05, "loss": 1.5021, "step": 2044 }, { "epoch": 16.36, "grad_norm": 23.23456382751465, "learning_rate": 4.6497777777777776e-05, "loss": 1.5403, "step": 2045 }, { "epoch": 16.368, "grad_norm": 32.70758056640625, "learning_rate": 4.649333333333333e-05, "loss": 1.5232, "step": 2046 }, { "epoch": 16.376, "grad_norm": 42.422630310058594, "learning_rate": 4.648888888888889e-05, "loss": 1.4743, "step": 2047 }, { "epoch": 16.384, "grad_norm": 19.526973724365234, "learning_rate": 4.648444444444445e-05, "loss": 1.5562, "step": 2048 }, { "epoch": 16.392, "grad_norm": 24.29669189453125, "learning_rate": 4.648e-05, "loss": 1.3363, "step": 2049 }, { "epoch": 16.4, "grad_norm": 15.291128158569336, "learning_rate": 4.647555555555556e-05, "loss": 1.8934, "step": 2050 }, { "epoch": 16.408, "grad_norm": 31.190462112426758, "learning_rate": 4.647111111111111e-05, "loss": 3.2713, "step": 2051 }, { "epoch": 16.416, "grad_norm": 28.173383712768555, "learning_rate": 4.646666666666667e-05, "loss": 1.1084, "step": 2052 }, { "epoch": 16.424, "grad_norm": 23.033748626708984, "learning_rate": 4.646222222222222e-05, "loss": 1.4189, "step": 2053 }, { "epoch": 16.432, "grad_norm": 24.13422203063965, "learning_rate": 4.6457777777777776e-05, "loss": 1.2123, "step": 2054 }, { "epoch": 16.44, "grad_norm": 25.85186004638672, "learning_rate": 4.645333333333334e-05, "loss": 1.1281, "step": 2055 }, { "epoch": 16.448, "grad_norm": 20.039182662963867, "learning_rate": 4.644888888888889e-05, "loss": 1.6778, "step": 2056 }, { "epoch": 16.456, "grad_norm": 
24.703685760498047, "learning_rate": 4.644444444444445e-05, "loss": 1.067, "step": 2057 }, { "epoch": 16.464, "grad_norm": 26.048898696899414, "learning_rate": 4.644e-05, "loss": 1.2879, "step": 2058 }, { "epoch": 16.472, "grad_norm": 15.470738410949707, "learning_rate": 4.643555555555556e-05, "loss": 1.12, "step": 2059 }, { "epoch": 16.48, "grad_norm": 25.462326049804688, "learning_rate": 4.643111111111111e-05, "loss": 1.4275, "step": 2060 }, { "epoch": 16.488, "grad_norm": 24.23862648010254, "learning_rate": 4.642666666666667e-05, "loss": 1.6229, "step": 2061 }, { "epoch": 16.496, "grad_norm": 37.58269500732422, "learning_rate": 4.642222222222222e-05, "loss": 1.3966, "step": 2062 }, { "epoch": 16.504, "grad_norm": 22.454833984375, "learning_rate": 4.6417777777777784e-05, "loss": 1.175, "step": 2063 }, { "epoch": 16.512, "grad_norm": 39.7972297668457, "learning_rate": 4.641333333333334e-05, "loss": 1.4049, "step": 2064 }, { "epoch": 16.52, "grad_norm": 26.674968719482422, "learning_rate": 4.640888888888889e-05, "loss": 1.3509, "step": 2065 }, { "epoch": 16.528, "grad_norm": 23.85468101501465, "learning_rate": 4.640444444444445e-05, "loss": 1.7033, "step": 2066 }, { "epoch": 16.536, "grad_norm": 22.716712951660156, "learning_rate": 4.64e-05, "loss": 1.4591, "step": 2067 }, { "epoch": 16.544, "grad_norm": 70.3439712524414, "learning_rate": 4.639555555555556e-05, "loss": 1.425, "step": 2068 }, { "epoch": 16.552, "grad_norm": 15.822588920593262, "learning_rate": 4.639111111111111e-05, "loss": 1.6898, "step": 2069 }, { "epoch": 16.56, "grad_norm": 18.202049255371094, "learning_rate": 4.638666666666667e-05, "loss": 1.3565, "step": 2070 }, { "epoch": 16.568, "grad_norm": 23.74634552001953, "learning_rate": 4.638222222222223e-05, "loss": 1.5046, "step": 2071 }, { "epoch": 16.576, "grad_norm": 32.814903259277344, "learning_rate": 4.6377777777777784e-05, "loss": 1.2319, "step": 2072 }, { "epoch": 16.584, "grad_norm": 21.079153060913086, "learning_rate": 
4.637333333333333e-05, "loss": 1.42, "step": 2073 }, { "epoch": 16.592, "grad_norm": 19.827070236206055, "learning_rate": 4.636888888888889e-05, "loss": 1.5097, "step": 2074 }, { "epoch": 16.6, "grad_norm": 31.12939453125, "learning_rate": 4.636444444444445e-05, "loss": 1.676, "step": 2075 }, { "epoch": 16.608, "grad_norm": 23.144981384277344, "learning_rate": 4.636e-05, "loss": 1.1947, "step": 2076 }, { "epoch": 16.616, "grad_norm": 18.077730178833008, "learning_rate": 4.635555555555556e-05, "loss": 1.1169, "step": 2077 }, { "epoch": 16.624, "grad_norm": 19.525007247924805, "learning_rate": 4.635111111111111e-05, "loss": 1.4474, "step": 2078 }, { "epoch": 16.632, "grad_norm": 22.398191452026367, "learning_rate": 4.6346666666666675e-05, "loss": 1.272, "step": 2079 }, { "epoch": 16.64, "grad_norm": 23.56058120727539, "learning_rate": 4.634222222222222e-05, "loss": 1.9702, "step": 2080 }, { "epoch": 16.648, "grad_norm": 33.05330276489258, "learning_rate": 4.633777777777778e-05, "loss": 1.7338, "step": 2081 }, { "epoch": 16.656, "grad_norm": 32.731407165527344, "learning_rate": 4.633333333333333e-05, "loss": 2.1228, "step": 2082 }, { "epoch": 16.664, "grad_norm": 15.707101821899414, "learning_rate": 4.632888888888889e-05, "loss": 1.3348, "step": 2083 }, { "epoch": 16.672, "grad_norm": 38.0722541809082, "learning_rate": 4.632444444444445e-05, "loss": 1.319, "step": 2084 }, { "epoch": 16.68, "grad_norm": 21.73859405517578, "learning_rate": 4.6320000000000004e-05, "loss": 1.4618, "step": 2085 }, { "epoch": 16.688, "grad_norm": 32.88330078125, "learning_rate": 4.631555555555556e-05, "loss": 1.5892, "step": 2086 }, { "epoch": 16.696, "grad_norm": 34.22654342651367, "learning_rate": 4.6311111111111113e-05, "loss": 1.436, "step": 2087 }, { "epoch": 16.704, "grad_norm": 35.82561111450195, "learning_rate": 4.630666666666667e-05, "loss": 1.1503, "step": 2088 }, { "epoch": 16.712, "grad_norm": 30.087011337280273, "learning_rate": 4.630222222222222e-05, "loss": 1.0926, "step": 
2089 }, { "epoch": 16.72, "grad_norm": 39.30044174194336, "learning_rate": 4.629777777777778e-05, "loss": 1.42, "step": 2090 }, { "epoch": 16.728, "grad_norm": 23.747407913208008, "learning_rate": 4.629333333333333e-05, "loss": 1.7283, "step": 2091 }, { "epoch": 16.736, "grad_norm": 24.30047035217285, "learning_rate": 4.6288888888888894e-05, "loss": 1.7849, "step": 2092 }, { "epoch": 16.744, "grad_norm": 20.77560806274414, "learning_rate": 4.628444444444445e-05, "loss": 1.2984, "step": 2093 }, { "epoch": 16.752, "grad_norm": 34.09810256958008, "learning_rate": 4.6280000000000004e-05, "loss": 1.3851, "step": 2094 }, { "epoch": 16.76, "grad_norm": 24.50315284729004, "learning_rate": 4.627555555555555e-05, "loss": 1.4129, "step": 2095 }, { "epoch": 16.768, "grad_norm": 27.00303840637207, "learning_rate": 4.6271111111111114e-05, "loss": 1.238, "step": 2096 }, { "epoch": 16.776, "grad_norm": 26.423185348510742, "learning_rate": 4.626666666666667e-05, "loss": 1.5527, "step": 2097 }, { "epoch": 16.784, "grad_norm": 63.92178726196289, "learning_rate": 4.6262222222222224e-05, "loss": 1.5137, "step": 2098 }, { "epoch": 16.792, "grad_norm": 52.87131118774414, "learning_rate": 4.625777777777778e-05, "loss": 1.74, "step": 2099 }, { "epoch": 16.8, "grad_norm": 25.68047523498535, "learning_rate": 4.625333333333334e-05, "loss": 1.6619, "step": 2100 }, { "epoch": 16.808, "grad_norm": 187.61923217773438, "learning_rate": 4.6248888888888895e-05, "loss": 1.1026, "step": 2101 }, { "epoch": 16.816, "grad_norm": 49.20827865600586, "learning_rate": 4.624444444444444e-05, "loss": 1.8758, "step": 2102 }, { "epoch": 16.824, "grad_norm": 32.79494857788086, "learning_rate": 4.624e-05, "loss": 1.2601, "step": 2103 }, { "epoch": 16.832, "grad_norm": 20.170568466186523, "learning_rate": 4.623555555555556e-05, "loss": 1.5355, "step": 2104 }, { "epoch": 16.84, "grad_norm": 27.140945434570312, "learning_rate": 4.6231111111111114e-05, "loss": 2.2704, "step": 2105 }, { "epoch": 16.848, "grad_norm": 
19.902599334716797, "learning_rate": 4.622666666666667e-05, "loss": 1.6249, "step": 2106 }, { "epoch": 16.856, "grad_norm": 34.53247833251953, "learning_rate": 4.6222222222222224e-05, "loss": 1.1469, "step": 2107 }, { "epoch": 16.864, "grad_norm": 17.26568603515625, "learning_rate": 4.621777777777778e-05, "loss": 1.3264, "step": 2108 }, { "epoch": 16.872, "grad_norm": 30.8466854095459, "learning_rate": 4.6213333333333334e-05, "loss": 1.2762, "step": 2109 }, { "epoch": 16.88, "grad_norm": 28.8914794921875, "learning_rate": 4.620888888888889e-05, "loss": 1.4054, "step": 2110 }, { "epoch": 16.888, "grad_norm": 37.78993606567383, "learning_rate": 4.620444444444444e-05, "loss": 1.2235, "step": 2111 }, { "epoch": 16.896, "grad_norm": 29.484737396240234, "learning_rate": 4.6200000000000005e-05, "loss": 1.7078, "step": 2112 }, { "epoch": 16.904, "grad_norm": 26.58283233642578, "learning_rate": 4.619555555555556e-05, "loss": 1.4086, "step": 2113 }, { "epoch": 16.912, "grad_norm": 19.257253646850586, "learning_rate": 4.6191111111111115e-05, "loss": 1.0746, "step": 2114 }, { "epoch": 16.92, "grad_norm": 51.63299560546875, "learning_rate": 4.618666666666667e-05, "loss": 1.5714, "step": 2115 }, { "epoch": 16.928, "grad_norm": 23.574560165405273, "learning_rate": 4.6182222222222224e-05, "loss": 1.8653, "step": 2116 }, { "epoch": 16.936, "grad_norm": 17.99546241760254, "learning_rate": 4.617777777777778e-05, "loss": 1.2104, "step": 2117 }, { "epoch": 16.944, "grad_norm": 35.321475982666016, "learning_rate": 4.6173333333333334e-05, "loss": 1.1771, "step": 2118 }, { "epoch": 16.951999999999998, "grad_norm": 27.498533248901367, "learning_rate": 4.616888888888889e-05, "loss": 1.307, "step": 2119 }, { "epoch": 16.96, "grad_norm": 21.749229431152344, "learning_rate": 4.616444444444445e-05, "loss": 1.1398, "step": 2120 }, { "epoch": 16.968, "grad_norm": 18.269315719604492, "learning_rate": 4.6160000000000005e-05, "loss": 1.1086, "step": 2121 }, { "epoch": 16.976, "grad_norm": 
23.967021942138672, "learning_rate": 4.615555555555556e-05, "loss": 2.3855, "step": 2122 }, { "epoch": 16.984, "grad_norm": 26.498882293701172, "learning_rate": 4.6151111111111115e-05, "loss": 1.2836, "step": 2123 }, { "epoch": 16.992, "grad_norm": 48.075103759765625, "learning_rate": 4.614666666666667e-05, "loss": 1.4415, "step": 2124 }, { "epoch": 17.0, "grad_norm": 33.43544006347656, "learning_rate": 4.6142222222222225e-05, "loss": 1.4088, "step": 2125 }, { "epoch": 17.0, "eval_loss": 1.4529426097869873, "eval_map": 0.2347, "eval_map_50": 0.5149, "eval_map_75": 0.1488, "eval_map_Coverall": 0.4832, "eval_map_Face_Shield": 0.2304, "eval_map_Gloves": 0.1657, "eval_map_Goggles": 0.0629, "eval_map_Mask": 0.2312, "eval_map_large": 0.3102, "eval_map_medium": 0.1524, "eval_map_small": 0.1323, "eval_mar_1": 0.2145, "eval_mar_10": 0.4025, "eval_mar_100": 0.4173, "eval_mar_100_Coverall": 0.6889, "eval_mar_100_Face_Shield": 0.4824, "eval_mar_100_Gloves": 0.3279, "eval_mar_100_Goggles": 0.2219, "eval_mar_100_Mask": 0.3654, "eval_mar_large": 0.4832, "eval_mar_medium": 0.3025, "eval_mar_small": 0.1637, "eval_runtime": 0.9113, "eval_samples_per_second": 31.823, "eval_steps_per_second": 2.195, "step": 2125 }, { "epoch": 17.008, "grad_norm": 41.6363639831543, "learning_rate": 4.613777777777778e-05, "loss": 1.9916, "step": 2126 }, { "epoch": 17.016, "grad_norm": 17.820240020751953, "learning_rate": 4.6133333333333334e-05, "loss": 1.9234, "step": 2127 }, { "epoch": 17.024, "grad_norm": 39.984031677246094, "learning_rate": 4.6128888888888896e-05, "loss": 1.2929, "step": 2128 }, { "epoch": 17.032, "grad_norm": 59.7001838684082, "learning_rate": 4.612444444444445e-05, "loss": 1.7554, "step": 2129 }, { "epoch": 17.04, "grad_norm": 47.89375686645508, "learning_rate": 4.612e-05, "loss": 2.531, "step": 2130 }, { "epoch": 17.048, "grad_norm": 41.37944412231445, "learning_rate": 4.6115555555555554e-05, "loss": 1.125, "step": 2131 }, { "epoch": 17.056, "grad_norm": 18.858184814453125, 
"learning_rate": 4.6111111111111115e-05, "loss": 1.1691, "step": 2132 }, { "epoch": 17.064, "grad_norm": 26.702932357788086, "learning_rate": 4.610666666666667e-05, "loss": 1.4591, "step": 2133 }, { "epoch": 17.072, "grad_norm": 30.499256134033203, "learning_rate": 4.6102222222222225e-05, "loss": 1.7751, "step": 2134 }, { "epoch": 17.08, "grad_norm": 20.997926712036133, "learning_rate": 4.609777777777778e-05, "loss": 1.5842, "step": 2135 }, { "epoch": 17.088, "grad_norm": 39.200157165527344, "learning_rate": 4.6093333333333335e-05, "loss": 1.1588, "step": 2136 }, { "epoch": 17.096, "grad_norm": 22.38324737548828, "learning_rate": 4.608888888888889e-05, "loss": 1.2324, "step": 2137 }, { "epoch": 17.104, "grad_norm": 17.646562576293945, "learning_rate": 4.6084444444444444e-05, "loss": 1.4278, "step": 2138 }, { "epoch": 17.112, "grad_norm": 41.25162124633789, "learning_rate": 4.608e-05, "loss": 1.2087, "step": 2139 }, { "epoch": 17.12, "grad_norm": 24.549116134643555, "learning_rate": 4.6075555555555554e-05, "loss": 1.8687, "step": 2140 }, { "epoch": 17.128, "grad_norm": 21.093852996826172, "learning_rate": 4.6071111111111116e-05, "loss": 1.4227, "step": 2141 }, { "epoch": 17.136, "grad_norm": 42.464324951171875, "learning_rate": 4.606666666666667e-05, "loss": 1.157, "step": 2142 }, { "epoch": 17.144, "grad_norm": 34.871463775634766, "learning_rate": 4.6062222222222225e-05, "loss": 1.4707, "step": 2143 }, { "epoch": 17.152, "grad_norm": 23.820045471191406, "learning_rate": 4.605777777777778e-05, "loss": 1.4269, "step": 2144 }, { "epoch": 17.16, "grad_norm": 29.56916618347168, "learning_rate": 4.6053333333333335e-05, "loss": 1.1726, "step": 2145 }, { "epoch": 17.168, "grad_norm": 21.91197395324707, "learning_rate": 4.604888888888889e-05, "loss": 1.6459, "step": 2146 }, { "epoch": 17.176, "grad_norm": 21.737445831298828, "learning_rate": 4.6044444444444445e-05, "loss": 1.6036, "step": 2147 }, { "epoch": 17.184, "grad_norm": 21.611936569213867, "learning_rate": 
4.604e-05, "loss": 1.4277, "step": 2148 }, { "epoch": 17.192, "grad_norm": 40.0798225402832, "learning_rate": 4.603555555555556e-05, "loss": 1.6433, "step": 2149 }, { "epoch": 17.2, "grad_norm": 32.405181884765625, "learning_rate": 4.6031111111111116e-05, "loss": 1.1021, "step": 2150 }, { "epoch": 17.208, "grad_norm": 21.779462814331055, "learning_rate": 4.602666666666667e-05, "loss": 1.4767, "step": 2151 }, { "epoch": 17.216, "grad_norm": 29.461111068725586, "learning_rate": 4.602222222222222e-05, "loss": 1.2183, "step": 2152 }, { "epoch": 17.224, "grad_norm": 33.498226165771484, "learning_rate": 4.601777777777778e-05, "loss": 1.1425, "step": 2153 }, { "epoch": 17.232, "grad_norm": 35.05173873901367, "learning_rate": 4.6013333333333336e-05, "loss": 1.2842, "step": 2154 }, { "epoch": 17.24, "grad_norm": 20.5382080078125, "learning_rate": 4.600888888888889e-05, "loss": 1.3676, "step": 2155 }, { "epoch": 17.248, "grad_norm": 25.51238441467285, "learning_rate": 4.6004444444444445e-05, "loss": 1.5928, "step": 2156 }, { "epoch": 17.256, "grad_norm": 26.644832611083984, "learning_rate": 4.600000000000001e-05, "loss": 1.6841, "step": 2157 }, { "epoch": 17.264, "grad_norm": 14.423022270202637, "learning_rate": 4.599555555555556e-05, "loss": 1.5517, "step": 2158 }, { "epoch": 17.272, "grad_norm": 22.96717071533203, "learning_rate": 4.599111111111111e-05, "loss": 0.9454, "step": 2159 }, { "epoch": 17.28, "grad_norm": 40.48264694213867, "learning_rate": 4.5986666666666665e-05, "loss": 1.2355, "step": 2160 }, { "epoch": 17.288, "grad_norm": 22.195707321166992, "learning_rate": 4.5982222222222226e-05, "loss": 1.1754, "step": 2161 }, { "epoch": 17.296, "grad_norm": 19.211849212646484, "learning_rate": 4.597777777777778e-05, "loss": 1.1094, "step": 2162 }, { "epoch": 17.304, "grad_norm": 16.93325424194336, "learning_rate": 4.5973333333333336e-05, "loss": 1.0996, "step": 2163 }, { "epoch": 17.312, "grad_norm": 27.196998596191406, "learning_rate": 4.596888888888889e-05, "loss": 
1.3512, "step": 2164 }, { "epoch": 17.32, "grad_norm": 21.161144256591797, "learning_rate": 4.5964444444444446e-05, "loss": 1.3502, "step": 2165 }, { "epoch": 17.328, "grad_norm": 21.917930603027344, "learning_rate": 4.596e-05, "loss": 1.4715, "step": 2166 }, { "epoch": 17.336, "grad_norm": 65.72294616699219, "learning_rate": 4.5955555555555555e-05, "loss": 3.2754, "step": 2167 }, { "epoch": 17.344, "grad_norm": 26.44985580444336, "learning_rate": 4.595111111111111e-05, "loss": 1.7292, "step": 2168 }, { "epoch": 17.352, "grad_norm": 44.1373291015625, "learning_rate": 4.594666666666667e-05, "loss": 1.9099, "step": 2169 }, { "epoch": 17.36, "grad_norm": 100.64148712158203, "learning_rate": 4.5942222222222227e-05, "loss": 1.437, "step": 2170 }, { "epoch": 17.368, "grad_norm": 20.02764892578125, "learning_rate": 4.593777777777778e-05, "loss": 1.5809, "step": 2171 }, { "epoch": 17.376, "grad_norm": 19.261262893676758, "learning_rate": 4.5933333333333336e-05, "loss": 1.432, "step": 2172 }, { "epoch": 17.384, "grad_norm": 24.93263053894043, "learning_rate": 4.592888888888889e-05, "loss": 1.0863, "step": 2173 }, { "epoch": 17.392, "grad_norm": 59.143707275390625, "learning_rate": 4.5924444444444446e-05, "loss": 1.2771, "step": 2174 }, { "epoch": 17.4, "grad_norm": 32.250999450683594, "learning_rate": 4.592e-05, "loss": 1.2154, "step": 2175 }, { "epoch": 17.408, "grad_norm": 19.26862907409668, "learning_rate": 4.5915555555555556e-05, "loss": 1.4008, "step": 2176 }, { "epoch": 17.416, "grad_norm": 31.82459259033203, "learning_rate": 4.591111111111112e-05, "loss": 2.3435, "step": 2177 }, { "epoch": 17.424, "grad_norm": 28.944067001342773, "learning_rate": 4.590666666666667e-05, "loss": 1.1751, "step": 2178 }, { "epoch": 17.432, "grad_norm": 40.23360824584961, "learning_rate": 4.590222222222223e-05, "loss": 1.389, "step": 2179 }, { "epoch": 17.44, "grad_norm": 240.9225616455078, "learning_rate": 4.589777777777778e-05, "loss": 2.0061, "step": 2180 }, { "epoch": 17.448, 
"grad_norm": 20.451047897338867, "learning_rate": 4.589333333333334e-05, "loss": 1.2308, "step": 2181 }, { "epoch": 17.456, "grad_norm": 27.489397048950195, "learning_rate": 4.588888888888889e-05, "loss": 1.1997, "step": 2182 }, { "epoch": 17.464, "grad_norm": 28.30182647705078, "learning_rate": 4.5884444444444446e-05, "loss": 1.0638, "step": 2183 }, { "epoch": 17.472, "grad_norm": 21.57144546508789, "learning_rate": 4.588e-05, "loss": 1.0608, "step": 2184 }, { "epoch": 17.48, "grad_norm": 40.91041946411133, "learning_rate": 4.587555555555556e-05, "loss": 1.6934, "step": 2185 }, { "epoch": 17.488, "grad_norm": 40.657962799072266, "learning_rate": 4.587111111111112e-05, "loss": 1.3975, "step": 2186 }, { "epoch": 17.496, "grad_norm": 22.089250564575195, "learning_rate": 4.5866666666666666e-05, "loss": 1.2131, "step": 2187 }, { "epoch": 17.504, "grad_norm": 36.90212631225586, "learning_rate": 4.586222222222222e-05, "loss": 1.2068, "step": 2188 }, { "epoch": 17.512, "grad_norm": 28.62275505065918, "learning_rate": 4.5857777777777775e-05, "loss": 1.1005, "step": 2189 }, { "epoch": 17.52, "grad_norm": 26.458471298217773, "learning_rate": 4.585333333333334e-05, "loss": 1.3767, "step": 2190 }, { "epoch": 17.528, "grad_norm": 40.351600646972656, "learning_rate": 4.584888888888889e-05, "loss": 1.3565, "step": 2191 }, { "epoch": 17.536, "grad_norm": 29.957101821899414, "learning_rate": 4.584444444444445e-05, "loss": 1.45, "step": 2192 }, { "epoch": 17.544, "grad_norm": 60.20361328125, "learning_rate": 4.584e-05, "loss": 1.512, "step": 2193 }, { "epoch": 17.552, "grad_norm": 27.946123123168945, "learning_rate": 4.5835555555555556e-05, "loss": 1.3716, "step": 2194 }, { "epoch": 17.56, "grad_norm": 22.722654342651367, "learning_rate": 4.583111111111111e-05, "loss": 1.3817, "step": 2195 }, { "epoch": 17.568, "grad_norm": 28.189266204833984, "learning_rate": 4.5826666666666666e-05, "loss": 1.6961, "step": 2196 }, { "epoch": 17.576, "grad_norm": 25.35343360900879, "learning_rate": 
4.582222222222222e-05, "loss": 1.3044, "step": 2197 }, { "epoch": 17.584, "grad_norm": 23.451980590820312, "learning_rate": 4.581777777777778e-05, "loss": 1.243, "step": 2198 }, { "epoch": 17.592, "grad_norm": 30.136734008789062, "learning_rate": 4.581333333333334e-05, "loss": 1.5159, "step": 2199 }, { "epoch": 17.6, "grad_norm": 55.314945220947266, "learning_rate": 4.580888888888889e-05, "loss": 1.0525, "step": 2200 }, { "epoch": 17.608, "grad_norm": 59.38069534301758, "learning_rate": 4.580444444444445e-05, "loss": 1.545, "step": 2201 }, { "epoch": 17.616, "grad_norm": 47.17570114135742, "learning_rate": 4.58e-05, "loss": 1.5967, "step": 2202 }, { "epoch": 17.624, "grad_norm": 25.575057983398438, "learning_rate": 4.579555555555556e-05, "loss": 2.6667, "step": 2203 }, { "epoch": 17.632, "grad_norm": 32.46311569213867, "learning_rate": 4.579111111111111e-05, "loss": 1.2741, "step": 2204 }, { "epoch": 17.64, "grad_norm": 27.360891342163086, "learning_rate": 4.5786666666666666e-05, "loss": 1.7598, "step": 2205 }, { "epoch": 17.648, "grad_norm": 40.92235565185547, "learning_rate": 4.578222222222223e-05, "loss": 1.7307, "step": 2206 }, { "epoch": 17.656, "grad_norm": 34.449039459228516, "learning_rate": 4.577777777777778e-05, "loss": 1.5147, "step": 2207 }, { "epoch": 17.664, "grad_norm": 51.69443893432617, "learning_rate": 4.577333333333334e-05, "loss": 1.3053, "step": 2208 }, { "epoch": 17.672, "grad_norm": 26.54031753540039, "learning_rate": 4.5768888888888886e-05, "loss": 1.1538, "step": 2209 }, { "epoch": 17.68, "grad_norm": 32.46577835083008, "learning_rate": 4.576444444444445e-05, "loss": 1.2283, "step": 2210 }, { "epoch": 17.688, "grad_norm": 20.979965209960938, "learning_rate": 4.576e-05, "loss": 0.8214, "step": 2211 }, { "epoch": 17.696, "grad_norm": 21.76815414428711, "learning_rate": 4.575555555555556e-05, "loss": 1.3176, "step": 2212 }, { "epoch": 17.704, "grad_norm": 83.96430969238281, "learning_rate": 4.575111111111111e-05, "loss": 1.4273, "step": 2213 
}, { "epoch": 17.712, "grad_norm": 30.681941986083984, "learning_rate": 4.5746666666666674e-05, "loss": 1.2148, "step": 2214 }, { "epoch": 17.72, "grad_norm": 31.599660873413086, "learning_rate": 4.574222222222223e-05, "loss": 1.5049, "step": 2215 }, { "epoch": 17.728, "grad_norm": 35.788551330566406, "learning_rate": 4.5737777777777777e-05, "loss": 1.403, "step": 2216 }, { "epoch": 17.736, "grad_norm": 51.63977813720703, "learning_rate": 4.573333333333333e-05, "loss": 1.0792, "step": 2217 }, { "epoch": 17.744, "grad_norm": 59.655887603759766, "learning_rate": 4.572888888888889e-05, "loss": 1.5166, "step": 2218 }, { "epoch": 17.752, "grad_norm": 33.10551071166992, "learning_rate": 4.572444444444445e-05, "loss": 1.399, "step": 2219 }, { "epoch": 17.76, "grad_norm": 27.293432235717773, "learning_rate": 4.572e-05, "loss": 1.4636, "step": 2220 }, { "epoch": 17.768, "grad_norm": 32.366493225097656, "learning_rate": 4.571555555555556e-05, "loss": 1.263, "step": 2221 }, { "epoch": 17.776, "grad_norm": 62.584388732910156, "learning_rate": 4.571111111111111e-05, "loss": 1.893, "step": 2222 }, { "epoch": 17.784, "grad_norm": 21.750871658325195, "learning_rate": 4.570666666666667e-05, "loss": 1.4221, "step": 2223 }, { "epoch": 17.792, "grad_norm": 32.39108657836914, "learning_rate": 4.570222222222222e-05, "loss": 1.1219, "step": 2224 }, { "epoch": 17.8, "grad_norm": 26.567882537841797, "learning_rate": 4.569777777777778e-05, "loss": 1.3735, "step": 2225 }, { "epoch": 17.808, "grad_norm": 25.29303550720215, "learning_rate": 4.569333333333334e-05, "loss": 1.4248, "step": 2226 }, { "epoch": 17.816, "grad_norm": 16.061214447021484, "learning_rate": 4.5688888888888893e-05, "loss": 1.3547, "step": 2227 }, { "epoch": 17.824, "grad_norm": 24.09099769592285, "learning_rate": 4.568444444444445e-05, "loss": 1.4158, "step": 2228 }, { "epoch": 17.832, "grad_norm": 27.226789474487305, "learning_rate": 4.568e-05, "loss": 1.2645, "step": 2229 }, { "epoch": 17.84, "grad_norm": 
61.318328857421875, "learning_rate": 4.567555555555556e-05, "loss": 0.9152, "step": 2230 }, { "epoch": 17.848, "grad_norm": 37.62213134765625, "learning_rate": 4.567111111111111e-05, "loss": 1.5742, "step": 2231 }, { "epoch": 17.856, "grad_norm": 41.652130126953125, "learning_rate": 4.566666666666667e-05, "loss": 1.0195, "step": 2232 }, { "epoch": 17.864, "grad_norm": 38.02284240722656, "learning_rate": 4.566222222222222e-05, "loss": 1.0487, "step": 2233 }, { "epoch": 17.872, "grad_norm": 19.550273895263672, "learning_rate": 4.5657777777777784e-05, "loss": 1.3101, "step": 2234 }, { "epoch": 17.88, "grad_norm": 24.23748016357422, "learning_rate": 4.565333333333334e-05, "loss": 1.5128, "step": 2235 }, { "epoch": 17.888, "grad_norm": 26.313282012939453, "learning_rate": 4.5648888888888894e-05, "loss": 1.1261, "step": 2236 }, { "epoch": 17.896, "grad_norm": 22.46111488342285, "learning_rate": 4.564444444444444e-05, "loss": 1.522, "step": 2237 }, { "epoch": 17.904, "grad_norm": 22.42604637145996, "learning_rate": 4.564e-05, "loss": 0.9926, "step": 2238 }, { "epoch": 17.912, "grad_norm": 26.59062385559082, "learning_rate": 4.563555555555556e-05, "loss": 1.363, "step": 2239 }, { "epoch": 17.92, "grad_norm": 30.77126121520996, "learning_rate": 4.563111111111111e-05, "loss": 1.6682, "step": 2240 }, { "epoch": 17.928, "grad_norm": 21.775348663330078, "learning_rate": 4.562666666666667e-05, "loss": 1.3349, "step": 2241 }, { "epoch": 17.936, "grad_norm": 162.77435302734375, "learning_rate": 4.562222222222222e-05, "loss": 1.264, "step": 2242 }, { "epoch": 17.944, "grad_norm": 35.38603591918945, "learning_rate": 4.5617777777777784e-05, "loss": 1.4822, "step": 2243 }, { "epoch": 17.951999999999998, "grad_norm": 39.54856491088867, "learning_rate": 4.561333333333333e-05, "loss": 1.4876, "step": 2244 }, { "epoch": 17.96, "grad_norm": 35.13252258300781, "learning_rate": 4.560888888888889e-05, "loss": 1.8882, "step": 2245 }, { "epoch": 17.968, "grad_norm": 23.132129669189453, 
"learning_rate": 4.560444444444444e-05, "loss": 1.9245, "step": 2246 }, { "epoch": 17.976, "grad_norm": 28.441781997680664, "learning_rate": 4.5600000000000004e-05, "loss": 1.4061, "step": 2247 }, { "epoch": 17.984, "grad_norm": 23.6213321685791, "learning_rate": 4.559555555555556e-05, "loss": 1.835, "step": 2248 }, { "epoch": 17.992, "grad_norm": 77.64689636230469, "learning_rate": 4.5591111111111114e-05, "loss": 1.5565, "step": 2249 }, { "epoch": 18.0, "grad_norm": 21.079172134399414, "learning_rate": 4.558666666666667e-05, "loss": 1.348, "step": 2250 }, { "epoch": 18.0, "eval_loss": 1.555975317955017, "eval_map": 0.2222, "eval_map_50": 0.5072, "eval_map_75": 0.1685, "eval_map_Coverall": 0.4303, "eval_map_Face_Shield": 0.2616, "eval_map_Gloves": 0.1207, "eval_map_Goggles": 0.0954, "eval_map_Mask": 0.2032, "eval_map_large": 0.2895, "eval_map_medium": 0.1355, "eval_map_small": 0.1561, "eval_mar_1": 0.2111, "eval_mar_10": 0.4056, "eval_mar_100": 0.4397, "eval_mar_100_Coverall": 0.6511, "eval_mar_100_Face_Shield": 0.5412, "eval_mar_100_Gloves": 0.2918, "eval_mar_100_Goggles": 0.3469, "eval_mar_100_Mask": 0.3673, "eval_mar_large": 0.5638, "eval_mar_medium": 0.3115, "eval_mar_small": 0.2327, "eval_runtime": 0.9212, "eval_samples_per_second": 31.481, "eval_steps_per_second": 2.171, "step": 2250 }, { "epoch": 18.008, "grad_norm": 39.27637481689453, "learning_rate": 4.558222222222222e-05, "loss": 1.1592, "step": 2251 }, { "epoch": 18.016, "grad_norm": 25.90840721130371, "learning_rate": 4.557777777777778e-05, "loss": 2.0338, "step": 2252 }, { "epoch": 18.024, "grad_norm": 29.795686721801758, "learning_rate": 4.557333333333333e-05, "loss": 1.1087, "step": 2253 }, { "epoch": 18.032, "grad_norm": 18.8885498046875, "learning_rate": 4.556888888888889e-05, "loss": 1.4853, "step": 2254 }, { "epoch": 18.04, "grad_norm": 78.1227035522461, "learning_rate": 4.556444444444445e-05, "loss": 1.5029, "step": 2255 }, { "epoch": 18.048, "grad_norm": 29.977548599243164, "learning_rate": 
4.5560000000000004e-05, "loss": 1.5123, "step": 2256 }, { "epoch": 18.056, "grad_norm": 27.310861587524414, "learning_rate": 4.555555555555556e-05, "loss": 1.5881, "step": 2257 }, { "epoch": 18.064, "grad_norm": 29.344375610351562, "learning_rate": 4.5551111111111114e-05, "loss": 1.5745, "step": 2258 }, { "epoch": 18.072, "grad_norm": 32.2817268371582, "learning_rate": 4.554666666666667e-05, "loss": 1.1095, "step": 2259 }, { "epoch": 18.08, "grad_norm": 32.93211364746094, "learning_rate": 4.5542222222222224e-05, "loss": 1.531, "step": 2260 }, { "epoch": 18.088, "grad_norm": 43.110496520996094, "learning_rate": 4.553777777777778e-05, "loss": 1.027, "step": 2261 }, { "epoch": 18.096, "grad_norm": 25.2806453704834, "learning_rate": 4.553333333333333e-05, "loss": 1.277, "step": 2262 }, { "epoch": 18.104, "grad_norm": 27.189836502075195, "learning_rate": 4.5528888888888895e-05, "loss": 1.2463, "step": 2263 }, { "epoch": 18.112, "grad_norm": 22.874380111694336, "learning_rate": 4.552444444444445e-05, "loss": 1.2409, "step": 2264 }, { "epoch": 18.12, "grad_norm": 21.747394561767578, "learning_rate": 4.5520000000000005e-05, "loss": 1.8526, "step": 2265 }, { "epoch": 18.128, "grad_norm": 33.01106262207031, "learning_rate": 4.551555555555555e-05, "loss": 1.6435, "step": 2266 }, { "epoch": 18.136, "grad_norm": 15.208322525024414, "learning_rate": 4.5511111111111114e-05, "loss": 1.2517, "step": 2267 }, { "epoch": 18.144, "grad_norm": 106.44306182861328, "learning_rate": 4.550666666666667e-05, "loss": 1.2929, "step": 2268 }, { "epoch": 18.152, "grad_norm": 24.196619033813477, "learning_rate": 4.5502222222222224e-05, "loss": 1.02, "step": 2269 }, { "epoch": 18.16, "grad_norm": 28.116573333740234, "learning_rate": 4.549777777777778e-05, "loss": 1.3035, "step": 2270 }, { "epoch": 18.168, "grad_norm": 17.193822860717773, "learning_rate": 4.549333333333334e-05, "loss": 1.5238, "step": 2271 }, { "epoch": 18.176, "grad_norm": 29.304609298706055, "learning_rate": 
4.5488888888888895e-05, "loss": 1.3817, "step": 2272 }, { "epoch": 18.184, "grad_norm": 29.09669303894043, "learning_rate": 4.5484444444444443e-05, "loss": 1.5727, "step": 2273 }, { "epoch": 18.192, "grad_norm": 34.20956039428711, "learning_rate": 4.548e-05, "loss": 1.1859, "step": 2274 }, { "epoch": 18.2, "grad_norm": 35.651851654052734, "learning_rate": 4.547555555555556e-05, "loss": 1.5719, "step": 2275 }, { "epoch": 18.208, "grad_norm": 40.82365036010742, "learning_rate": 4.5471111111111115e-05, "loss": 1.263, "step": 2276 }, { "epoch": 18.216, "grad_norm": 36.791141510009766, "learning_rate": 4.546666666666667e-05, "loss": 2.6222, "step": 2277 }, { "epoch": 18.224, "grad_norm": 22.762897491455078, "learning_rate": 4.5462222222222224e-05, "loss": 1.3972, "step": 2278 }, { "epoch": 18.232, "grad_norm": 32.3114128112793, "learning_rate": 4.545777777777778e-05, "loss": 2.2847, "step": 2279 }, { "epoch": 18.24, "grad_norm": 38.22654342651367, "learning_rate": 4.5453333333333334e-05, "loss": 1.6536, "step": 2280 }, { "epoch": 18.248, "grad_norm": 25.363340377807617, "learning_rate": 4.544888888888889e-05, "loss": 1.3905, "step": 2281 }, { "epoch": 18.256, "grad_norm": 33.99924087524414, "learning_rate": 4.5444444444444444e-05, "loss": 1.1376, "step": 2282 }, { "epoch": 18.264, "grad_norm": 17.90929412841797, "learning_rate": 4.5440000000000005e-05, "loss": 1.2938, "step": 2283 }, { "epoch": 18.272, "grad_norm": 24.64398956298828, "learning_rate": 4.543555555555556e-05, "loss": 1.376, "step": 2284 }, { "epoch": 18.28, "grad_norm": 25.221298217773438, "learning_rate": 4.5431111111111115e-05, "loss": 1.1778, "step": 2285 }, { "epoch": 18.288, "grad_norm": 42.26108932495117, "learning_rate": 4.542666666666667e-05, "loss": 1.3403, "step": 2286 }, { "epoch": 18.296, "grad_norm": 89.01643371582031, "learning_rate": 4.5422222222222225e-05, "loss": 1.5429, "step": 2287 }, { "epoch": 18.304, "grad_norm": 27.55754280090332, "learning_rate": 4.541777777777778e-05, "loss": 
1.5406, "step": 2288 }, { "epoch": 18.312, "grad_norm": 21.182893753051758, "learning_rate": 4.5413333333333334e-05, "loss": 1.1118, "step": 2289 }, { "epoch": 18.32, "grad_norm": 29.869800567626953, "learning_rate": 4.540888888888889e-05, "loss": 1.4584, "step": 2290 }, { "epoch": 18.328, "grad_norm": 65.02388000488281, "learning_rate": 4.5404444444444444e-05, "loss": 1.3255, "step": 2291 }, { "epoch": 18.336, "grad_norm": 19.758974075317383, "learning_rate": 4.5400000000000006e-05, "loss": 1.5147, "step": 2292 }, { "epoch": 18.344, "grad_norm": 32.448421478271484, "learning_rate": 4.539555555555556e-05, "loss": 1.9924, "step": 2293 }, { "epoch": 18.352, "grad_norm": 23.229774475097656, "learning_rate": 4.539111111111111e-05, "loss": 1.6649, "step": 2294 }, { "epoch": 18.36, "grad_norm": 22.511648178100586, "learning_rate": 4.5386666666666664e-05, "loss": 1.6943, "step": 2295 }, { "epoch": 18.368, "grad_norm": 23.82888412475586, "learning_rate": 4.5382222222222225e-05, "loss": 1.2645, "step": 2296 }, { "epoch": 18.376, "grad_norm": 36.08230209350586, "learning_rate": 4.537777777777778e-05, "loss": 1.3739, "step": 2297 }, { "epoch": 18.384, "grad_norm": 22.78040313720703, "learning_rate": 4.5373333333333335e-05, "loss": 1.2508, "step": 2298 }, { "epoch": 18.392, "grad_norm": 24.7854061126709, "learning_rate": 4.536888888888889e-05, "loss": 1.2724, "step": 2299 }, { "epoch": 18.4, "grad_norm": 135.737060546875, "learning_rate": 4.536444444444445e-05, "loss": 1.4749, "step": 2300 }, { "epoch": 18.408, "grad_norm": 35.340633392333984, "learning_rate": 4.536e-05, "loss": 2.0375, "step": 2301 }, { "epoch": 18.416, "grad_norm": 29.6088809967041, "learning_rate": 4.5355555555555554e-05, "loss": 1.4938, "step": 2302 }, { "epoch": 18.424, "grad_norm": 24.477407455444336, "learning_rate": 4.535111111111111e-05, "loss": 1.7267, "step": 2303 }, { "epoch": 18.432, "grad_norm": 23.537654876708984, "learning_rate": 4.534666666666667e-05, "loss": 1.2764, "step": 2304 }, { "epoch": 
18.44, "grad_norm": 32.910484313964844, "learning_rate": 4.5342222222222226e-05, "loss": 1.2355, "step": 2305 }, { "epoch": 18.448, "grad_norm": 24.55036163330078, "learning_rate": 4.533777777777778e-05, "loss": 1.4314, "step": 2306 }, { "epoch": 18.456, "grad_norm": 24.530941009521484, "learning_rate": 4.5333333333333335e-05, "loss": 1.1493, "step": 2307 }, { "epoch": 18.464, "grad_norm": 25.830318450927734, "learning_rate": 4.532888888888889e-05, "loss": 1.9081, "step": 2308 }, { "epoch": 18.472, "grad_norm": 28.31300926208496, "learning_rate": 4.5324444444444445e-05, "loss": 1.4592, "step": 2309 }, { "epoch": 18.48, "grad_norm": 14.605720520019531, "learning_rate": 4.532e-05, "loss": 0.9947, "step": 2310 }, { "epoch": 18.488, "grad_norm": 33.026947021484375, "learning_rate": 4.5315555555555555e-05, "loss": 1.2324, "step": 2311 }, { "epoch": 18.496, "grad_norm": 26.996557235717773, "learning_rate": 4.5311111111111116e-05, "loss": 1.2267, "step": 2312 }, { "epoch": 18.504, "grad_norm": 29.971799850463867, "learning_rate": 4.530666666666667e-05, "loss": 1.8207, "step": 2313 }, { "epoch": 18.512, "grad_norm": 28.385847091674805, "learning_rate": 4.5302222222222226e-05, "loss": 1.0018, "step": 2314 }, { "epoch": 18.52, "grad_norm": 39.00154495239258, "learning_rate": 4.529777777777778e-05, "loss": 1.8394, "step": 2315 }, { "epoch": 18.528, "grad_norm": 25.860029220581055, "learning_rate": 4.5293333333333336e-05, "loss": 1.3284, "step": 2316 }, { "epoch": 18.536, "grad_norm": 19.056148529052734, "learning_rate": 4.528888888888889e-05, "loss": 1.391, "step": 2317 }, { "epoch": 18.544, "grad_norm": 35.93590545654297, "learning_rate": 4.5284444444444445e-05, "loss": 1.4426, "step": 2318 }, { "epoch": 18.552, "grad_norm": 30.64774513244629, "learning_rate": 4.528e-05, "loss": 1.308, "step": 2319 }, { "epoch": 18.56, "grad_norm": 184.47332763671875, "learning_rate": 4.527555555555556e-05, "loss": 1.7185, "step": 2320 }, { "epoch": 18.568, "grad_norm": 32.44892501831055, 
"learning_rate": 4.527111111111112e-05, "loss": 1.474, "step": 2321 }, { "epoch": 18.576, "grad_norm": 30.520952224731445, "learning_rate": 4.526666666666667e-05, "loss": 1.5231, "step": 2322 }, { "epoch": 18.584, "grad_norm": 23.873218536376953, "learning_rate": 4.526222222222222e-05, "loss": 2.789, "step": 2323 }, { "epoch": 18.592, "grad_norm": 24.111940383911133, "learning_rate": 4.525777777777778e-05, "loss": 1.5339, "step": 2324 }, { "epoch": 18.6, "grad_norm": 49.7336311340332, "learning_rate": 4.5253333333333336e-05, "loss": 1.4229, "step": 2325 }, { "epoch": 18.608, "grad_norm": 21.86642837524414, "learning_rate": 4.524888888888889e-05, "loss": 1.4944, "step": 2326 }, { "epoch": 18.616, "grad_norm": 32.2965202331543, "learning_rate": 4.5244444444444446e-05, "loss": 1.5338, "step": 2327 }, { "epoch": 18.624, "grad_norm": 42.62073516845703, "learning_rate": 4.524000000000001e-05, "loss": 2.7899, "step": 2328 }, { "epoch": 18.632, "grad_norm": 22.382396697998047, "learning_rate": 4.523555555555556e-05, "loss": 1.3358, "step": 2329 }, { "epoch": 18.64, "grad_norm": 29.527950286865234, "learning_rate": 4.523111111111111e-05, "loss": 1.7366, "step": 2330 }, { "epoch": 18.648, "grad_norm": 34.102195739746094, "learning_rate": 4.5226666666666665e-05, "loss": 1.4122, "step": 2331 }, { "epoch": 18.656, "grad_norm": 45.92996597290039, "learning_rate": 4.522222222222223e-05, "loss": 1.2295, "step": 2332 }, { "epoch": 18.664, "grad_norm": 23.66159439086914, "learning_rate": 4.521777777777778e-05, "loss": 1.315, "step": 2333 }, { "epoch": 18.672, "grad_norm": 21.548511505126953, "learning_rate": 4.5213333333333336e-05, "loss": 1.4284, "step": 2334 }, { "epoch": 18.68, "grad_norm": 28.87291145324707, "learning_rate": 4.520888888888889e-05, "loss": 1.5284, "step": 2335 }, { "epoch": 18.688, "grad_norm": 30.650619506835938, "learning_rate": 4.5204444444444446e-05, "loss": 1.1984, "step": 2336 }, { "epoch": 18.696, "grad_norm": 20.64787483215332, "learning_rate": 4.52e-05, 
"loss": 1.5255, "step": 2337 }, { "epoch": 18.704, "grad_norm": 33.1988525390625, "learning_rate": 4.5195555555555556e-05, "loss": 0.9608, "step": 2338 }, { "epoch": 18.712, "grad_norm": 52.07280349731445, "learning_rate": 4.519111111111111e-05, "loss": 1.5257, "step": 2339 }, { "epoch": 18.72, "grad_norm": 22.803966522216797, "learning_rate": 4.518666666666667e-05, "loss": 1.1044, "step": 2340 }, { "epoch": 18.728, "grad_norm": 26.818065643310547, "learning_rate": 4.518222222222223e-05, "loss": 1.1217, "step": 2341 }, { "epoch": 18.736, "grad_norm": 19.763118743896484, "learning_rate": 4.517777777777778e-05, "loss": 1.3148, "step": 2342 }, { "epoch": 18.744, "grad_norm": 31.89238166809082, "learning_rate": 4.517333333333334e-05, "loss": 1.3848, "step": 2343 }, { "epoch": 18.752, "grad_norm": 46.09971237182617, "learning_rate": 4.516888888888889e-05, "loss": 1.7415, "step": 2344 }, { "epoch": 18.76, "grad_norm": 15.498644828796387, "learning_rate": 4.5164444444444446e-05, "loss": 1.073, "step": 2345 }, { "epoch": 18.768, "grad_norm": 44.908172607421875, "learning_rate": 4.516e-05, "loss": 1.554, "step": 2346 }, { "epoch": 18.776, "grad_norm": 21.143230438232422, "learning_rate": 4.5155555555555556e-05, "loss": 1.647, "step": 2347 }, { "epoch": 18.784, "grad_norm": 23.36370086669922, "learning_rate": 4.515111111111111e-05, "loss": 1.1886, "step": 2348 }, { "epoch": 18.792, "grad_norm": 100.68673706054688, "learning_rate": 4.514666666666667e-05, "loss": 1.7353, "step": 2349 }, { "epoch": 18.8, "grad_norm": 26.720592498779297, "learning_rate": 4.514222222222223e-05, "loss": 1.3866, "step": 2350 }, { "epoch": 18.808, "grad_norm": 49.8173942565918, "learning_rate": 4.5137777777777776e-05, "loss": 1.96, "step": 2351 }, { "epoch": 18.816, "grad_norm": 63.9565544128418, "learning_rate": 4.513333333333333e-05, "loss": 1.1757, "step": 2352 }, { "epoch": 18.824, "grad_norm": 42.106712341308594, "learning_rate": 4.512888888888889e-05, "loss": 1.2357, "step": 2353 }, { "epoch": 
18.832, "grad_norm": 31.56462860107422, "learning_rate": 4.512444444444445e-05, "loss": 1.3733, "step": 2354 }, { "epoch": 18.84, "grad_norm": 21.1319637298584, "learning_rate": 4.512e-05, "loss": 1.4823, "step": 2355 }, { "epoch": 18.848, "grad_norm": 34.14524459838867, "learning_rate": 4.5115555555555557e-05, "loss": 1.2865, "step": 2356 }, { "epoch": 18.856, "grad_norm": 22.864770889282227, "learning_rate": 4.511111111111112e-05, "loss": 1.5958, "step": 2357 }, { "epoch": 18.864, "grad_norm": 24.867429733276367, "learning_rate": 4.5106666666666666e-05, "loss": 1.333, "step": 2358 }, { "epoch": 18.872, "grad_norm": 26.27091407775879, "learning_rate": 4.510222222222222e-05, "loss": 1.3423, "step": 2359 }, { "epoch": 18.88, "grad_norm": 95.90200805664062, "learning_rate": 4.5097777777777776e-05, "loss": 1.3208, "step": 2360 }, { "epoch": 18.888, "grad_norm": 27.910083770751953, "learning_rate": 4.509333333333334e-05, "loss": 1.1392, "step": 2361 }, { "epoch": 18.896, "grad_norm": 20.1210994720459, "learning_rate": 4.508888888888889e-05, "loss": 1.0352, "step": 2362 }, { "epoch": 18.904, "grad_norm": 34.869510650634766, "learning_rate": 4.508444444444445e-05, "loss": 0.9896, "step": 2363 }, { "epoch": 18.912, "grad_norm": 16.115215301513672, "learning_rate": 4.508e-05, "loss": 1.3324, "step": 2364 }, { "epoch": 18.92, "grad_norm": 20.46091651916504, "learning_rate": 4.507555555555556e-05, "loss": 1.4303, "step": 2365 }, { "epoch": 18.928, "grad_norm": 54.613929748535156, "learning_rate": 4.507111111111111e-05, "loss": 1.1873, "step": 2366 }, { "epoch": 18.936, "grad_norm": 32.45579528808594, "learning_rate": 4.5066666666666667e-05, "loss": 1.18, "step": 2367 }, { "epoch": 18.944, "grad_norm": 54.033721923828125, "learning_rate": 4.506222222222222e-05, "loss": 1.4919, "step": 2368 }, { "epoch": 18.951999999999998, "grad_norm": 26.527544021606445, "learning_rate": 4.505777777777778e-05, "loss": 1.1893, "step": 2369 }, { "epoch": 18.96, "grad_norm": 41.7343864440918, 
"learning_rate": 4.505333333333334e-05, "loss": 3.0022, "step": 2370 }, { "epoch": 18.968, "grad_norm": 26.643407821655273, "learning_rate": 4.504888888888889e-05, "loss": 1.3128, "step": 2371 }, { "epoch": 18.976, "grad_norm": 31.772109985351562, "learning_rate": 4.504444444444445e-05, "loss": 1.0626, "step": 2372 }, { "epoch": 18.984, "grad_norm": 18.387117385864258, "learning_rate": 4.504e-05, "loss": 1.3592, "step": 2373 }, { "epoch": 18.992, "grad_norm": 29.135366439819336, "learning_rate": 4.503555555555556e-05, "loss": 1.6521, "step": 2374 }, { "epoch": 19.0, "grad_norm": 36.198692321777344, "learning_rate": 4.503111111111111e-05, "loss": 1.8579, "step": 2375 }, { "epoch": 19.0, "eval_loss": 1.4963966608047485, "eval_map": 0.2174, "eval_map_50": 0.486, "eval_map_75": 0.1952, "eval_map_Coverall": 0.4415, "eval_map_Face_Shield": 0.3028, "eval_map_Gloves": 0.1155, "eval_map_Goggles": 0.0441, "eval_map_Mask": 0.1834, "eval_map_large": 0.2979, "eval_map_medium": 0.1411, "eval_map_small": 0.0603, "eval_mar_1": 0.2069, "eval_mar_10": 0.3917, "eval_mar_100": 0.4105, "eval_mar_100_Coverall": 0.6556, "eval_mar_100_Face_Shield": 0.5294, "eval_mar_100_Gloves": 0.3049, "eval_mar_100_Goggles": 0.2281, "eval_mar_100_Mask": 0.3346, "eval_mar_large": 0.616, "eval_mar_medium": 0.2885, "eval_mar_small": 0.0822, "eval_runtime": 0.912, "eval_samples_per_second": 31.798, "eval_steps_per_second": 2.193, "step": 2375 }, { "epoch": 19.008, "grad_norm": 28.054397583007812, "learning_rate": 4.502666666666667e-05, "loss": 1.1685, "step": 2376 }, { "epoch": 19.016, "grad_norm": 19.709463119506836, "learning_rate": 4.502222222222223e-05, "loss": 1.2154, "step": 2377 }, { "epoch": 19.024, "grad_norm": 22.54465675354004, "learning_rate": 4.5017777777777783e-05, "loss": 1.4094, "step": 2378 }, { "epoch": 19.032, "grad_norm": 54.05628204345703, "learning_rate": 4.501333333333334e-05, "loss": 1.8284, "step": 2379 }, { "epoch": 19.04, "grad_norm": 23.313566207885742, "learning_rate": 
4.5008888888888886e-05, "loss": 0.9968, "step": 2380 }, { "epoch": 19.048, "grad_norm": 27.14885711669922, "learning_rate": 4.500444444444445e-05, "loss": 1.3352, "step": 2381 }, { "epoch": 19.056, "grad_norm": 37.08219909667969, "learning_rate": 4.5e-05, "loss": 1.1566, "step": 2382 }, { "epoch": 19.064, "grad_norm": 68.81546783447266, "learning_rate": 4.499555555555556e-05, "loss": 1.747, "step": 2383 }, { "epoch": 19.072, "grad_norm": 23.045137405395508, "learning_rate": 4.499111111111111e-05, "loss": 1.6627, "step": 2384 }, { "epoch": 19.08, "grad_norm": 33.17257308959961, "learning_rate": 4.4986666666666674e-05, "loss": 1.2783, "step": 2385 }, { "epoch": 19.088, "grad_norm": 34.45427322387695, "learning_rate": 4.498222222222222e-05, "loss": 1.2796, "step": 2386 }, { "epoch": 19.096, "grad_norm": 37.60655975341797, "learning_rate": 4.497777777777778e-05, "loss": 1.5579, "step": 2387 }, { "epoch": 19.104, "grad_norm": 27.357402801513672, "learning_rate": 4.497333333333333e-05, "loss": 1.4233, "step": 2388 }, { "epoch": 19.112, "grad_norm": 32.927486419677734, "learning_rate": 4.4968888888888894e-05, "loss": 1.3269, "step": 2389 }, { "epoch": 19.12, "grad_norm": 15.991405487060547, "learning_rate": 4.496444444444445e-05, "loss": 1.2583, "step": 2390 }, { "epoch": 19.128, "grad_norm": 47.15363311767578, "learning_rate": 4.496e-05, "loss": 1.8613, "step": 2391 }, { "epoch": 19.136, "grad_norm": 67.13084411621094, "learning_rate": 4.495555555555556e-05, "loss": 1.4709, "step": 2392 }, { "epoch": 19.144, "grad_norm": 30.676990509033203, "learning_rate": 4.495111111111111e-05, "loss": 1.4222, "step": 2393 }, { "epoch": 19.152, "grad_norm": 32.20746994018555, "learning_rate": 4.494666666666667e-05, "loss": 1.0201, "step": 2394 }, { "epoch": 19.16, "grad_norm": 18.941328048706055, "learning_rate": 4.494222222222222e-05, "loss": 0.9787, "step": 2395 }, { "epoch": 19.168, "grad_norm": 36.8806037902832, "learning_rate": 4.493777777777778e-05, "loss": 1.282, "step": 2396 }, 
{ "epoch": 19.176, "grad_norm": 27.395986557006836, "learning_rate": 4.493333333333333e-05, "loss": 1.2617, "step": 2397 }, { "epoch": 19.184, "grad_norm": 25.914941787719727, "learning_rate": 4.4928888888888894e-05, "loss": 1.2841, "step": 2398 }, { "epoch": 19.192, "grad_norm": 38.09187698364258, "learning_rate": 4.492444444444445e-05, "loss": 1.188, "step": 2399 }, { "epoch": 19.2, "grad_norm": 16.89827537536621, "learning_rate": 4.4920000000000004e-05, "loss": 1.4382, "step": 2400 }, { "epoch": 19.208, "grad_norm": 26.04709243774414, "learning_rate": 4.491555555555556e-05, "loss": 1.0032, "step": 2401 }, { "epoch": 19.216, "grad_norm": 50.58066177368164, "learning_rate": 4.491111111111111e-05, "loss": 1.1894, "step": 2402 }, { "epoch": 19.224, "grad_norm": 30.93265151977539, "learning_rate": 4.490666666666667e-05, "loss": 1.7017, "step": 2403 }, { "epoch": 19.232, "grad_norm": 44.49715805053711, "learning_rate": 4.490222222222222e-05, "loss": 1.6187, "step": 2404 }, { "epoch": 19.24, "grad_norm": 24.327653884887695, "learning_rate": 4.489777777777778e-05, "loss": 1.7145, "step": 2405 }, { "epoch": 19.248, "grad_norm": 203.33839416503906, "learning_rate": 4.489333333333334e-05, "loss": 1.1066, "step": 2406 }, { "epoch": 19.256, "grad_norm": 28.181825637817383, "learning_rate": 4.4888888888888894e-05, "loss": 1.118, "step": 2407 }, { "epoch": 19.264, "grad_norm": 25.698030471801758, "learning_rate": 4.488444444444444e-05, "loss": 1.8274, "step": 2408 }, { "epoch": 19.272, "grad_norm": 53.18385314941406, "learning_rate": 4.488e-05, "loss": 1.2948, "step": 2409 }, { "epoch": 19.28, "grad_norm": 40.08729553222656, "learning_rate": 4.487555555555556e-05, "loss": 1.7748, "step": 2410 }, { "epoch": 19.288, "grad_norm": 20.878965377807617, "learning_rate": 4.4871111111111114e-05, "loss": 1.3748, "step": 2411 }, { "epoch": 19.296, "grad_norm": 19.06516456604004, "learning_rate": 4.486666666666667e-05, "loss": 1.6255, "step": 2412 }, { "epoch": 19.304, "grad_norm": 
25.678281784057617, "learning_rate": 4.486222222222222e-05, "loss": 1.3728, "step": 2413 }, { "epoch": 19.312, "grad_norm": 27.49025535583496, "learning_rate": 4.4857777777777785e-05, "loss": 2.1744, "step": 2414 }, { "epoch": 19.32, "grad_norm": 32.201576232910156, "learning_rate": 4.485333333333333e-05, "loss": 1.3495, "step": 2415 }, { "epoch": 19.328, "grad_norm": 132.72975158691406, "learning_rate": 4.484888888888889e-05, "loss": 1.0244, "step": 2416 }, { "epoch": 19.336, "grad_norm": 24.97991943359375, "learning_rate": 4.484444444444444e-05, "loss": 1.5475, "step": 2417 }, { "epoch": 19.344, "grad_norm": 45.02299880981445, "learning_rate": 4.4840000000000004e-05, "loss": 1.3713, "step": 2418 }, { "epoch": 19.352, "grad_norm": 24.36662483215332, "learning_rate": 4.483555555555556e-05, "loss": 1.2121, "step": 2419 }, { "epoch": 19.36, "grad_norm": 34.56914138793945, "learning_rate": 4.4831111111111114e-05, "loss": 1.1793, "step": 2420 }, { "epoch": 19.368, "grad_norm": 36.703060150146484, "learning_rate": 4.482666666666667e-05, "loss": 1.2229, "step": 2421 }, { "epoch": 19.376, "grad_norm": 57.143924713134766, "learning_rate": 4.4822222222222224e-05, "loss": 1.3243, "step": 2422 }, { "epoch": 19.384, "grad_norm": 18.747976303100586, "learning_rate": 4.481777777777778e-05, "loss": 1.3377, "step": 2423 }, { "epoch": 19.392, "grad_norm": 33.51019287109375, "learning_rate": 4.4813333333333333e-05, "loss": 1.3543, "step": 2424 }, { "epoch": 19.4, "grad_norm": 35.66767501831055, "learning_rate": 4.480888888888889e-05, "loss": 1.1416, "step": 2425 }, { "epoch": 19.408, "grad_norm": 41.38368606567383, "learning_rate": 4.480444444444445e-05, "loss": 1.4602, "step": 2426 }, { "epoch": 19.416, "grad_norm": 40.91952133178711, "learning_rate": 4.4800000000000005e-05, "loss": 1.3863, "step": 2427 }, { "epoch": 19.424, "grad_norm": 24.177711486816406, "learning_rate": 4.479555555555556e-05, "loss": 2.9688, "step": 2428 }, { "epoch": 19.432, "grad_norm": 24.903982162475586, 
"learning_rate": 4.4791111111111114e-05, "loss": 1.1128, "step": 2429 }, { "epoch": 19.44, "grad_norm": 48.10315704345703, "learning_rate": 4.478666666666667e-05, "loss": 1.034, "step": 2430 }, { "epoch": 19.448, "grad_norm": 43.9191780090332, "learning_rate": 4.4782222222222224e-05, "loss": 1.2311, "step": 2431 }, { "epoch": 19.456, "grad_norm": 20.82588005065918, "learning_rate": 4.477777777777778e-05, "loss": 1.4446, "step": 2432 }, { "epoch": 19.464, "grad_norm": 78.22572326660156, "learning_rate": 4.4773333333333334e-05, "loss": 1.2241, "step": 2433 }, { "epoch": 19.472, "grad_norm": 53.28805160522461, "learning_rate": 4.4768888888888895e-05, "loss": 1.4115, "step": 2434 }, { "epoch": 19.48, "grad_norm": 30.879304885864258, "learning_rate": 4.476444444444445e-05, "loss": 1.5316, "step": 2435 }, { "epoch": 19.488, "grad_norm": 25.69309425354004, "learning_rate": 4.4760000000000005e-05, "loss": 1.3489, "step": 2436 }, { "epoch": 19.496, "grad_norm": 35.025882720947266, "learning_rate": 4.475555555555555e-05, "loss": 1.6159, "step": 2437 }, { "epoch": 19.504, "grad_norm": 59.99488067626953, "learning_rate": 4.4751111111111115e-05, "loss": 1.4961, "step": 2438 }, { "epoch": 19.512, "grad_norm": 36.31069564819336, "learning_rate": 4.474666666666667e-05, "loss": 1.8748, "step": 2439 }, { "epoch": 19.52, "grad_norm": 50.717044830322266, "learning_rate": 4.4742222222222225e-05, "loss": 1.1664, "step": 2440 }, { "epoch": 19.528, "grad_norm": 54.97786331176758, "learning_rate": 4.473777777777778e-05, "loss": 1.7636, "step": 2441 }, { "epoch": 19.536, "grad_norm": 37.052974700927734, "learning_rate": 4.473333333333334e-05, "loss": 1.3875, "step": 2442 }, { "epoch": 19.544, "grad_norm": 43.344093322753906, "learning_rate": 4.472888888888889e-05, "loss": 1.5727, "step": 2443 }, { "epoch": 19.552, "grad_norm": 24.264354705810547, "learning_rate": 4.4724444444444444e-05, "loss": 1.249, "step": 2444 }, { "epoch": 19.56, "grad_norm": 16.707923889160156, "learning_rate": 
4.472e-05, "loss": 0.9837, "step": 2445 }, { "epoch": 19.568, "grad_norm": 40.15495681762695, "learning_rate": 4.4715555555555554e-05, "loss": 1.399, "step": 2446 }, { "epoch": 19.576, "grad_norm": 19.208850860595703, "learning_rate": 4.4711111111111115e-05, "loss": 1.4746, "step": 2447 }, { "epoch": 19.584, "grad_norm": 95.15343475341797, "learning_rate": 4.470666666666667e-05, "loss": 2.7546, "step": 2448 }, { "epoch": 19.592, "grad_norm": 52.892173767089844, "learning_rate": 4.4702222222222225e-05, "loss": 1.0896, "step": 2449 }, { "epoch": 19.6, "grad_norm": 20.10940933227539, "learning_rate": 4.469777777777778e-05, "loss": 1.0714, "step": 2450 }, { "epoch": 19.608, "grad_norm": 32.47422790527344, "learning_rate": 4.4693333333333335e-05, "loss": 1.2254, "step": 2451 }, { "epoch": 19.616, "grad_norm": 23.370891571044922, "learning_rate": 4.468888888888889e-05, "loss": 1.1951, "step": 2452 }, { "epoch": 19.624, "grad_norm": 19.420988082885742, "learning_rate": 4.4684444444444444e-05, "loss": 1.1498, "step": 2453 }, { "epoch": 19.632, "grad_norm": 41.64096450805664, "learning_rate": 4.468e-05, "loss": 1.8791, "step": 2454 }, { "epoch": 19.64, "grad_norm": 43.871456146240234, "learning_rate": 4.467555555555556e-05, "loss": 1.2267, "step": 2455 }, { "epoch": 19.648, "grad_norm": 29.407146453857422, "learning_rate": 4.4671111111111116e-05, "loss": 1.3588, "step": 2456 }, { "epoch": 19.656, "grad_norm": 22.78312873840332, "learning_rate": 4.466666666666667e-05, "loss": 2.3834, "step": 2457 }, { "epoch": 19.664, "grad_norm": 42.85343933105469, "learning_rate": 4.4662222222222225e-05, "loss": 1.6254, "step": 2458 }, { "epoch": 19.672, "grad_norm": 43.616214752197266, "learning_rate": 4.465777777777778e-05, "loss": 1.1354, "step": 2459 }, { "epoch": 19.68, "grad_norm": 50.52040481567383, "learning_rate": 4.4653333333333335e-05, "loss": 1.3105, "step": 2460 }, { "epoch": 19.688, "grad_norm": 42.842384338378906, "learning_rate": 4.464888888888889e-05, "loss": 1.6833, 
"step": 2461 }, { "epoch": 19.696, "grad_norm": 19.25401496887207, "learning_rate": 4.4644444444444445e-05, "loss": 1.4852, "step": 2462 }, { "epoch": 19.704, "grad_norm": 32.67308044433594, "learning_rate": 4.4640000000000006e-05, "loss": 1.2173, "step": 2463 }, { "epoch": 19.712, "grad_norm": 25.961767196655273, "learning_rate": 4.463555555555556e-05, "loss": 1.2233, "step": 2464 }, { "epoch": 19.72, "grad_norm": 77.8871841430664, "learning_rate": 4.463111111111111e-05, "loss": 1.6713, "step": 2465 }, { "epoch": 19.728, "grad_norm": 21.109800338745117, "learning_rate": 4.4626666666666664e-05, "loss": 1.5235, "step": 2466 }, { "epoch": 19.736, "grad_norm": 54.141143798828125, "learning_rate": 4.4622222222222226e-05, "loss": 1.0598, "step": 2467 }, { "epoch": 19.744, "grad_norm": 40.34381103515625, "learning_rate": 4.461777777777778e-05, "loss": 2.2132, "step": 2468 }, { "epoch": 19.752, "grad_norm": 53.14034652709961, "learning_rate": 4.4613333333333335e-05, "loss": 1.7143, "step": 2469 }, { "epoch": 19.76, "grad_norm": 24.45149040222168, "learning_rate": 4.460888888888889e-05, "loss": 1.1571, "step": 2470 }, { "epoch": 19.768, "grad_norm": 15.26447868347168, "learning_rate": 4.460444444444445e-05, "loss": 2.2154, "step": 2471 }, { "epoch": 19.776, "grad_norm": 17.848403930664062, "learning_rate": 4.46e-05, "loss": 1.284, "step": 2472 }, { "epoch": 19.784, "grad_norm": 30.920726776123047, "learning_rate": 4.4595555555555555e-05, "loss": 1.5412, "step": 2473 }, { "epoch": 19.792, "grad_norm": 22.52800941467285, "learning_rate": 4.459111111111111e-05, "loss": 1.3611, "step": 2474 }, { "epoch": 19.8, "grad_norm": 36.16841506958008, "learning_rate": 4.458666666666667e-05, "loss": 1.2999, "step": 2475 }, { "epoch": 19.808, "grad_norm": 23.12425994873047, "learning_rate": 4.4582222222222226e-05, "loss": 1.3412, "step": 2476 }, { "epoch": 19.816, "grad_norm": 76.05299377441406, "learning_rate": 4.457777777777778e-05, "loss": 1.1811, "step": 2477 }, { "epoch": 19.824, 
"grad_norm": 33.76369857788086, "learning_rate": 4.4573333333333336e-05, "loss": 1.3095, "step": 2478 }, { "epoch": 19.832, "grad_norm": 25.437240600585938, "learning_rate": 4.456888888888889e-05, "loss": 0.9661, "step": 2479 }, { "epoch": 19.84, "grad_norm": 66.2925033569336, "learning_rate": 4.4564444444444445e-05, "loss": 1.3403, "step": 2480 }, { "epoch": 19.848, "grad_norm": 22.208351135253906, "learning_rate": 4.456e-05, "loss": 0.9637, "step": 2481 }, { "epoch": 19.856, "grad_norm": 53.7769660949707, "learning_rate": 4.4555555555555555e-05, "loss": 1.2482, "step": 2482 }, { "epoch": 19.864, "grad_norm": 39.316986083984375, "learning_rate": 4.455111111111112e-05, "loss": 1.4046, "step": 2483 }, { "epoch": 19.872, "grad_norm": 41.12799072265625, "learning_rate": 4.454666666666667e-05, "loss": 1.0961, "step": 2484 }, { "epoch": 19.88, "grad_norm": 34.96120071411133, "learning_rate": 4.4542222222222226e-05, "loss": 1.414, "step": 2485 }, { "epoch": 19.888, "grad_norm": 24.803991317749023, "learning_rate": 4.453777777777778e-05, "loss": 1.6553, "step": 2486 }, { "epoch": 19.896, "grad_norm": 118.50177001953125, "learning_rate": 4.4533333333333336e-05, "loss": 1.2727, "step": 2487 }, { "epoch": 19.904, "grad_norm": 33.91422653198242, "learning_rate": 4.452888888888889e-05, "loss": 1.1007, "step": 2488 }, { "epoch": 19.912, "grad_norm": 32.72799301147461, "learning_rate": 4.4524444444444446e-05, "loss": 1.1941, "step": 2489 }, { "epoch": 19.92, "grad_norm": 18.919645309448242, "learning_rate": 4.452e-05, "loss": 1.3757, "step": 2490 }, { "epoch": 19.928, "grad_norm": 31.399166107177734, "learning_rate": 4.451555555555556e-05, "loss": 0.967, "step": 2491 }, { "epoch": 19.936, "grad_norm": 23.487707138061523, "learning_rate": 4.451111111111112e-05, "loss": 1.1326, "step": 2492 }, { "epoch": 19.944, "grad_norm": 33.40894317626953, "learning_rate": 4.450666666666667e-05, "loss": 1.0493, "step": 2493 }, { "epoch": 19.951999999999998, "grad_norm": 32.225215911865234, 
"learning_rate": 4.450222222222222e-05, "loss": 1.2897, "step": 2494 }, { "epoch": 19.96, "grad_norm": 31.30081558227539, "learning_rate": 4.449777777777778e-05, "loss": 1.1952, "step": 2495 }, { "epoch": 19.968, "grad_norm": 23.230497360229492, "learning_rate": 4.4493333333333337e-05, "loss": 0.9623, "step": 2496 }, { "epoch": 19.976, "grad_norm": 30.08828353881836, "learning_rate": 4.448888888888889e-05, "loss": 1.2135, "step": 2497 }, { "epoch": 19.984, "grad_norm": 29.39976692199707, "learning_rate": 4.4484444444444446e-05, "loss": 1.1888, "step": 2498 }, { "epoch": 19.992, "grad_norm": 59.593448638916016, "learning_rate": 4.448e-05, "loss": 1.3711, "step": 2499 }, { "epoch": 20.0, "grad_norm": 32.95857238769531, "learning_rate": 4.4475555555555556e-05, "loss": 1.7153, "step": 2500 }, { "epoch": 20.0, "eval_loss": 1.3739372491836548, "eval_map": 0.2638, "eval_map_50": 0.5453, "eval_map_75": 0.2335, "eval_map_Coverall": 0.4845, "eval_map_Face_Shield": 0.2621, "eval_map_Gloves": 0.205, "eval_map_Goggles": 0.0892, "eval_map_Mask": 0.2783, "eval_map_large": 0.3711, "eval_map_medium": 0.1755, "eval_map_small": 0.1292, "eval_mar_1": 0.2337, "eval_mar_10": 0.4398, "eval_mar_100": 0.4573, "eval_mar_100_Coverall": 0.6867, "eval_mar_100_Face_Shield": 0.4824, "eval_mar_100_Gloves": 0.3738, "eval_mar_100_Goggles": 0.3281, "eval_mar_100_Mask": 0.4154, "eval_mar_large": 0.6257, "eval_mar_medium": 0.331, "eval_mar_small": 0.1793, "eval_runtime": 0.9043, "eval_samples_per_second": 32.069, "eval_steps_per_second": 2.212, "step": 2500 }, { "epoch": 20.008, "grad_norm": 41.769615173339844, "learning_rate": 4.447111111111111e-05, "loss": 1.4533, "step": 2501 }, { "epoch": 20.016, "grad_norm": 31.173080444335938, "learning_rate": 4.4466666666666666e-05, "loss": 1.3361, "step": 2502 }, { "epoch": 20.024, "grad_norm": 29.128894805908203, "learning_rate": 4.446222222222222e-05, "loss": 1.0482, "step": 2503 }, { "epoch": 20.032, "grad_norm": 33.81040954589844, "learning_rate": 
4.445777777777778e-05, "loss": 1.4701, "step": 2504 }, { "epoch": 20.04, "grad_norm": 22.205984115600586, "learning_rate": 4.445333333333334e-05, "loss": 1.4975, "step": 2505 }, { "epoch": 20.048, "grad_norm": 37.94432067871094, "learning_rate": 4.444888888888889e-05, "loss": 1.2118, "step": 2506 }, { "epoch": 20.056, "grad_norm": 27.14864158630371, "learning_rate": 4.4444444444444447e-05, "loss": 1.2246, "step": 2507 }, { "epoch": 20.064, "grad_norm": 87.8592529296875, "learning_rate": 4.444e-05, "loss": 2.0071, "step": 2508 }, { "epoch": 20.072, "grad_norm": 19.552635192871094, "learning_rate": 4.4435555555555556e-05, "loss": 1.4592, "step": 2509 }, { "epoch": 20.08, "grad_norm": 59.89681625366211, "learning_rate": 4.443111111111111e-05, "loss": 1.7942, "step": 2510 }, { "epoch": 20.088, "grad_norm": 37.27604293823242, "learning_rate": 4.4426666666666666e-05, "loss": 1.203, "step": 2511 }, { "epoch": 20.096, "grad_norm": 70.03993225097656, "learning_rate": 4.442222222222223e-05, "loss": 1.5122, "step": 2512 }, { "epoch": 20.104, "grad_norm": 36.79780960083008, "learning_rate": 4.441777777777778e-05, "loss": 1.4622, "step": 2513 }, { "epoch": 20.112, "grad_norm": 40.47228240966797, "learning_rate": 4.441333333333334e-05, "loss": 1.3713, "step": 2514 }, { "epoch": 20.12, "grad_norm": 22.099828720092773, "learning_rate": 4.440888888888889e-05, "loss": 1.5184, "step": 2515 }, { "epoch": 20.128, "grad_norm": 19.284223556518555, "learning_rate": 4.440444444444445e-05, "loss": 1.3075, "step": 2516 }, { "epoch": 20.136, "grad_norm": 56.26647186279297, "learning_rate": 4.44e-05, "loss": 1.1926, "step": 2517 }, { "epoch": 20.144, "grad_norm": 20.929107666015625, "learning_rate": 4.439555555555556e-05, "loss": 1.3758, "step": 2518 }, { "epoch": 20.152, "grad_norm": 24.985931396484375, "learning_rate": 4.439111111111111e-05, "loss": 1.2162, "step": 2519 }, { "epoch": 20.16, "grad_norm": 26.728057861328125, "learning_rate": 4.438666666666667e-05, "loss": 1.3591, "step": 2520 
}, { "epoch": 20.168, "grad_norm": 24.61381721496582, "learning_rate": 4.438222222222223e-05, "loss": 1.311, "step": 2521 }, { "epoch": 20.176, "grad_norm": 27.09568214416504, "learning_rate": 4.4377777777777776e-05, "loss": 1.3996, "step": 2522 }, { "epoch": 20.184, "grad_norm": 31.599456787109375, "learning_rate": 4.437333333333333e-05, "loss": 2.3765, "step": 2523 }, { "epoch": 20.192, "grad_norm": 22.75594711303711, "learning_rate": 4.436888888888889e-05, "loss": 1.6624, "step": 2524 }, { "epoch": 20.2, "grad_norm": 22.739606857299805, "learning_rate": 4.436444444444445e-05, "loss": 1.5004, "step": 2525 }, { "epoch": 20.208, "grad_norm": 97.73860168457031, "learning_rate": 4.436e-05, "loss": 1.891, "step": 2526 }, { "epoch": 20.216, "grad_norm": 30.718017578125, "learning_rate": 4.435555555555556e-05, "loss": 1.3434, "step": 2527 }, { "epoch": 20.224, "grad_norm": 27.812053680419922, "learning_rate": 4.435111111111112e-05, "loss": 0.8749, "step": 2528 }, { "epoch": 20.232, "grad_norm": 31.890520095825195, "learning_rate": 4.434666666666667e-05, "loss": 1.5577, "step": 2529 }, { "epoch": 20.24, "grad_norm": 23.966167449951172, "learning_rate": 4.434222222222222e-05, "loss": 1.2031, "step": 2530 }, { "epoch": 20.248, "grad_norm": 33.25513458251953, "learning_rate": 4.4337777777777776e-05, "loss": 1.4749, "step": 2531 }, { "epoch": 20.256, "grad_norm": 20.34005355834961, "learning_rate": 4.433333333333334e-05, "loss": 1.3257, "step": 2532 }, { "epoch": 20.264, "grad_norm": 89.31793212890625, "learning_rate": 4.432888888888889e-05, "loss": 1.1142, "step": 2533 }, { "epoch": 20.272, "grad_norm": 21.9874210357666, "learning_rate": 4.432444444444445e-05, "loss": 1.5712, "step": 2534 }, { "epoch": 20.28, "grad_norm": 19.732929229736328, "learning_rate": 4.432e-05, "loss": 1.1111, "step": 2535 }, { "epoch": 20.288, "grad_norm": 34.60763168334961, "learning_rate": 4.431555555555556e-05, "loss": 0.9725, "step": 2536 }, { "epoch": 20.296, "grad_norm": 21.09970474243164, 
"learning_rate": 4.431111111111111e-05, "loss": 0.9571, "step": 2537 }, { "epoch": 20.304, "grad_norm": 20.277603149414062, "learning_rate": 4.430666666666667e-05, "loss": 1.2645, "step": 2538 }, { "epoch": 20.312, "grad_norm": 179.51190185546875, "learning_rate": 4.430222222222222e-05, "loss": 1.5336, "step": 2539 }, { "epoch": 20.32, "grad_norm": 16.182851791381836, "learning_rate": 4.4297777777777784e-05, "loss": 1.1558, "step": 2540 }, { "epoch": 20.328, "grad_norm": 66.03412628173828, "learning_rate": 4.429333333333334e-05, "loss": 1.473, "step": 2541 }, { "epoch": 20.336, "grad_norm": 50.90890884399414, "learning_rate": 4.428888888888889e-05, "loss": 2.1669, "step": 2542 }, { "epoch": 20.344, "grad_norm": 51.801700592041016, "learning_rate": 4.428444444444445e-05, "loss": 1.0908, "step": 2543 }, { "epoch": 20.352, "grad_norm": 119.01224517822266, "learning_rate": 4.428e-05, "loss": 1.2385, "step": 2544 }, { "epoch": 20.36, "grad_norm": 79.3949966430664, "learning_rate": 4.427555555555556e-05, "loss": 1.4878, "step": 2545 }, { "epoch": 20.368, "grad_norm": 37.37410354614258, "learning_rate": 4.427111111111111e-05, "loss": 1.5561, "step": 2546 }, { "epoch": 20.376, "grad_norm": 25.466529846191406, "learning_rate": 4.426666666666667e-05, "loss": 1.4599, "step": 2547 }, { "epoch": 20.384, "grad_norm": 27.52873992919922, "learning_rate": 4.426222222222222e-05, "loss": 1.6713, "step": 2548 }, { "epoch": 20.392, "grad_norm": 26.691333770751953, "learning_rate": 4.4257777777777784e-05, "loss": 1.2301, "step": 2549 }, { "epoch": 20.4, "grad_norm": 30.16112518310547, "learning_rate": 4.425333333333334e-05, "loss": 1.575, "step": 2550 }, { "epoch": 20.408, "grad_norm": 32.43531799316406, "learning_rate": 4.424888888888889e-05, "loss": 1.4475, "step": 2551 }, { "epoch": 20.416, "grad_norm": 54.97245788574219, "learning_rate": 4.424444444444444e-05, "loss": 1.9585, "step": 2552 }, { "epoch": 20.424, "grad_norm": 46.13304901123047, "learning_rate": 4.424e-05, "loss": 
1.6735, "step": 2553 }, { "epoch": 20.432, "grad_norm": 22.454660415649414, "learning_rate": 4.423555555555556e-05, "loss": 2.2746, "step": 2554 }, { "epoch": 20.44, "grad_norm": 27.439912796020508, "learning_rate": 4.423111111111111e-05, "loss": 1.0557, "step": 2555 }, { "epoch": 20.448, "grad_norm": 35.8802604675293, "learning_rate": 4.422666666666667e-05, "loss": 1.0734, "step": 2556 }, { "epoch": 20.456, "grad_norm": 28.699796676635742, "learning_rate": 4.422222222222222e-05, "loss": 1.6928, "step": 2557 }, { "epoch": 20.464, "grad_norm": 42.692405700683594, "learning_rate": 4.421777777777778e-05, "loss": 1.1165, "step": 2558 }, { "epoch": 20.472, "grad_norm": 27.525863647460938, "learning_rate": 4.421333333333333e-05, "loss": 1.1119, "step": 2559 }, { "epoch": 20.48, "grad_norm": 28.347219467163086, "learning_rate": 4.420888888888889e-05, "loss": 1.0548, "step": 2560 }, { "epoch": 20.488, "grad_norm": 30.190410614013672, "learning_rate": 4.420444444444445e-05, "loss": 1.2234, "step": 2561 }, { "epoch": 20.496, "grad_norm": 26.044570922851562, "learning_rate": 4.4200000000000004e-05, "loss": 1.6456, "step": 2562 }, { "epoch": 20.504, "grad_norm": 66.20430755615234, "learning_rate": 4.419555555555556e-05, "loss": 1.4244, "step": 2563 }, { "epoch": 20.512, "grad_norm": 21.19099998474121, "learning_rate": 4.4191111111111113e-05, "loss": 3.1456, "step": 2564 }, { "epoch": 20.52, "grad_norm": 18.126201629638672, "learning_rate": 4.418666666666667e-05, "loss": 1.3071, "step": 2565 }, { "epoch": 20.528, "grad_norm": 23.249164581298828, "learning_rate": 4.418222222222222e-05, "loss": 1.2425, "step": 2566 }, { "epoch": 20.536, "grad_norm": 27.808454513549805, "learning_rate": 4.417777777777778e-05, "loss": 0.9226, "step": 2567 }, { "epoch": 20.544, "grad_norm": 33.65149688720703, "learning_rate": 4.417333333333333e-05, "loss": 1.4256, "step": 2568 }, { "epoch": 20.552, "grad_norm": 25.06113624572754, "learning_rate": 4.4168888888888894e-05, "loss": 1.1797, "step": 2569 
}, { "epoch": 20.56, "grad_norm": 58.36106491088867, "learning_rate": 4.416444444444445e-05, "loss": 1.7135, "step": 2570 }, { "epoch": 20.568, "grad_norm": 53.327518463134766, "learning_rate": 4.4160000000000004e-05, "loss": 1.5636, "step": 2571 }, { "epoch": 20.576, "grad_norm": 26.918161392211914, "learning_rate": 4.415555555555556e-05, "loss": 1.1946, "step": 2572 }, { "epoch": 20.584, "grad_norm": 40.552085876464844, "learning_rate": 4.4151111111111114e-05, "loss": 1.5293, "step": 2573 }, { "epoch": 20.592, "grad_norm": 33.476890563964844, "learning_rate": 4.414666666666667e-05, "loss": 1.4688, "step": 2574 }, { "epoch": 20.6, "grad_norm": 21.982742309570312, "learning_rate": 4.4142222222222223e-05, "loss": 1.4362, "step": 2575 }, { "epoch": 20.608, "grad_norm": 32.0804557800293, "learning_rate": 4.413777777777778e-05, "loss": 1.505, "step": 2576 }, { "epoch": 20.616, "grad_norm": 41.303138732910156, "learning_rate": 4.413333333333334e-05, "loss": 0.8983, "step": 2577 }, { "epoch": 20.624, "grad_norm": 27.60807228088379, "learning_rate": 4.4128888888888895e-05, "loss": 1.5216, "step": 2578 }, { "epoch": 20.632, "grad_norm": 23.510700225830078, "learning_rate": 4.412444444444444e-05, "loss": 0.9506, "step": 2579 }, { "epoch": 20.64, "grad_norm": 42.199493408203125, "learning_rate": 4.412e-05, "loss": 1.169, "step": 2580 }, { "epoch": 20.648, "grad_norm": 21.277652740478516, "learning_rate": 4.411555555555556e-05, "loss": 1.0283, "step": 2581 }, { "epoch": 20.656, "grad_norm": 49.75737380981445, "learning_rate": 4.4111111111111114e-05, "loss": 1.2817, "step": 2582 }, { "epoch": 20.664, "grad_norm": 18.211467742919922, "learning_rate": 4.410666666666667e-05, "loss": 1.6704, "step": 2583 }, { "epoch": 20.672, "grad_norm": 42.97862243652344, "learning_rate": 4.4102222222222224e-05, "loss": 1.3261, "step": 2584 }, { "epoch": 20.68, "grad_norm": 20.321693420410156, "learning_rate": 4.4097777777777785e-05, "loss": 1.5963, "step": 2585 }, { "epoch": 20.688, 
"grad_norm": 19.64729118347168, "learning_rate": 4.4093333333333334e-05, "loss": 1.2347, "step": 2586 }, { "epoch": 20.696, "grad_norm": 33.30612564086914, "learning_rate": 4.408888888888889e-05, "loss": 1.3405, "step": 2587 }, { "epoch": 20.704, "grad_norm": 54.44623565673828, "learning_rate": 4.408444444444444e-05, "loss": 0.9805, "step": 2588 }, { "epoch": 20.712, "grad_norm": 24.057735443115234, "learning_rate": 4.4080000000000005e-05, "loss": 1.317, "step": 2589 }, { "epoch": 20.72, "grad_norm": 23.643362045288086, "learning_rate": 4.407555555555556e-05, "loss": 0.9965, "step": 2590 }, { "epoch": 20.728, "grad_norm": 34.757781982421875, "learning_rate": 4.4071111111111115e-05, "loss": 1.6024, "step": 2591 }, { "epoch": 20.736, "grad_norm": 48.24336242675781, "learning_rate": 4.406666666666667e-05, "loss": 1.5594, "step": 2592 }, { "epoch": 20.744, "grad_norm": 49.35659408569336, "learning_rate": 4.4062222222222224e-05, "loss": 1.1871, "step": 2593 }, { "epoch": 20.752, "grad_norm": 26.308591842651367, "learning_rate": 4.405777777777778e-05, "loss": 1.2997, "step": 2594 }, { "epoch": 20.76, "grad_norm": 26.268808364868164, "learning_rate": 4.4053333333333334e-05, "loss": 1.5618, "step": 2595 }, { "epoch": 20.768, "grad_norm": 71.34526062011719, "learning_rate": 4.404888888888889e-05, "loss": 1.402, "step": 2596 }, { "epoch": 20.776, "grad_norm": 33.8690071105957, "learning_rate": 4.404444444444445e-05, "loss": 0.9773, "step": 2597 }, { "epoch": 20.784, "grad_norm": 23.450576782226562, "learning_rate": 4.4040000000000005e-05, "loss": 1.0261, "step": 2598 }, { "epoch": 20.792, "grad_norm": 84.74644470214844, "learning_rate": 4.403555555555556e-05, "loss": 1.4702, "step": 2599 }, { "epoch": 20.8, "grad_norm": 41.37596130371094, "learning_rate": 4.4031111111111115e-05, "loss": 1.1322, "step": 2600 }, { "epoch": 20.808, "grad_norm": 167.44725036621094, "learning_rate": 4.402666666666666e-05, "loss": 1.4571, "step": 2601 }, { "epoch": 20.816, "grad_norm": 
22.845762252807617, "learning_rate": 4.4022222222222225e-05, "loss": 1.5075, "step": 2602 }, { "epoch": 20.824, "grad_norm": 25.514284133911133, "learning_rate": 4.401777777777778e-05, "loss": 1.0903, "step": 2603 }, { "epoch": 20.832, "grad_norm": 35.68497085571289, "learning_rate": 4.4013333333333334e-05, "loss": 1.3739, "step": 2604 }, { "epoch": 20.84, "grad_norm": 42.64023971557617, "learning_rate": 4.400888888888889e-05, "loss": 1.2628, "step": 2605 }, { "epoch": 20.848, "grad_norm": 23.15612030029297, "learning_rate": 4.400444444444445e-05, "loss": 1.2656, "step": 2606 }, { "epoch": 20.856, "grad_norm": 60.117008209228516, "learning_rate": 4.4000000000000006e-05, "loss": 2.2755, "step": 2607 }, { "epoch": 20.864, "grad_norm": 20.67118263244629, "learning_rate": 4.3995555555555554e-05, "loss": 1.4676, "step": 2608 }, { "epoch": 20.872, "grad_norm": 246.24014282226562, "learning_rate": 4.399111111111111e-05, "loss": 1.4512, "step": 2609 }, { "epoch": 20.88, "grad_norm": 33.30849075317383, "learning_rate": 4.398666666666667e-05, "loss": 1.4222, "step": 2610 }, { "epoch": 20.888, "grad_norm": 30.710968017578125, "learning_rate": 4.3982222222222225e-05, "loss": 1.139, "step": 2611 }, { "epoch": 20.896, "grad_norm": 24.20098114013672, "learning_rate": 4.397777777777778e-05, "loss": 1.2112, "step": 2612 }, { "epoch": 20.904, "grad_norm": 20.27486801147461, "learning_rate": 4.3973333333333335e-05, "loss": 1.5327, "step": 2613 }, { "epoch": 20.912, "grad_norm": 20.557701110839844, "learning_rate": 4.396888888888889e-05, "loss": 2.0575, "step": 2614 }, { "epoch": 20.92, "grad_norm": 17.791025161743164, "learning_rate": 4.3964444444444444e-05, "loss": 1.4449, "step": 2615 }, { "epoch": 20.928, "grad_norm": 63.6075439453125, "learning_rate": 4.396e-05, "loss": 1.5652, "step": 2616 }, { "epoch": 20.936, "grad_norm": 82.52583312988281, "learning_rate": 4.3955555555555554e-05, "loss": 1.1821, "step": 2617 }, { "epoch": 20.944, "grad_norm": 19.0244140625, "learning_rate": 
4.3951111111111116e-05, "loss": 1.1948, "step": 2618 }, { "epoch": 20.951999999999998, "grad_norm": 36.02129364013672, "learning_rate": 4.394666666666667e-05, "loss": 1.1518, "step": 2619 }, { "epoch": 20.96, "grad_norm": 24.227785110473633, "learning_rate": 4.3942222222222225e-05, "loss": 1.4649, "step": 2620 }, { "epoch": 20.968, "grad_norm": 20.680158615112305, "learning_rate": 4.393777777777778e-05, "loss": 1.3507, "step": 2621 }, { "epoch": 20.976, "grad_norm": 25.96541404724121, "learning_rate": 4.3933333333333335e-05, "loss": 1.4451, "step": 2622 }, { "epoch": 20.984, "grad_norm": 31.68254280090332, "learning_rate": 4.392888888888889e-05, "loss": 1.4648, "step": 2623 }, { "epoch": 20.992, "grad_norm": 32.51976013183594, "learning_rate": 4.3924444444444445e-05, "loss": 1.415, "step": 2624 }, { "epoch": 21.0, "grad_norm": 25.136301040649414, "learning_rate": 4.392e-05, "loss": 1.2028, "step": 2625 }, { "epoch": 21.0, "eval_loss": 1.3190735578536987, "eval_map": 0.28, "eval_map_50": 0.5451, "eval_map_75": 0.2402, "eval_map_Coverall": 0.5401, "eval_map_Face_Shield": 0.3034, "eval_map_Gloves": 0.1616, "eval_map_Goggles": 0.1041, "eval_map_Mask": 0.2907, "eval_map_large": 0.3293, "eval_map_medium": 0.1768, "eval_map_small": 0.1693, "eval_mar_1": 0.2576, "eval_mar_10": 0.4589, "eval_mar_100": 0.4757, "eval_mar_100_Coverall": 0.6867, "eval_mar_100_Face_Shield": 0.5824, "eval_mar_100_Gloves": 0.3426, "eval_mar_100_Goggles": 0.3688, "eval_mar_100_Mask": 0.3981, "eval_mar_large": 0.5596, "eval_mar_medium": 0.3431, "eval_mar_small": 0.2898, "eval_runtime": 0.9261, "eval_samples_per_second": 31.313, "eval_steps_per_second": 2.16, "step": 2625 }, { "epoch": 21.008, "grad_norm": 40.2640266418457, "learning_rate": 4.391555555555556e-05, "loss": 1.1966, "step": 2626 }, { "epoch": 21.016, "grad_norm": 68.59814453125, "learning_rate": 4.3911111111111116e-05, "loss": 1.6291, "step": 2627 }, { "epoch": 21.024, "grad_norm": 24.38972282409668, "learning_rate": 
4.390666666666667e-05, "loss": 1.672, "step": 2628 }, { "epoch": 21.032, "grad_norm": 22.943571090698242, "learning_rate": 4.390222222222222e-05, "loss": 1.2122, "step": 2629 }, { "epoch": 21.04, "grad_norm": 30.214651107788086, "learning_rate": 4.389777777777778e-05, "loss": 1.2391, "step": 2630 }, { "epoch": 21.048, "grad_norm": 32.27757263183594, "learning_rate": 4.3893333333333335e-05, "loss": 1.0514, "step": 2631 }, { "epoch": 21.056, "grad_norm": 290.69036865234375, "learning_rate": 4.388888888888889e-05, "loss": 1.3591, "step": 2632 }, { "epoch": 21.064, "grad_norm": 32.69879150390625, "learning_rate": 4.3884444444444445e-05, "loss": 2.1499, "step": 2633 }, { "epoch": 21.072, "grad_norm": 43.10075759887695, "learning_rate": 4.388000000000001e-05, "loss": 1.0472, "step": 2634 }, { "epoch": 21.08, "grad_norm": 28.873136520385742, "learning_rate": 4.387555555555556e-05, "loss": 1.4291, "step": 2635 }, { "epoch": 21.088, "grad_norm": 27.500804901123047, "learning_rate": 4.387111111111111e-05, "loss": 1.1906, "step": 2636 }, { "epoch": 21.096, "grad_norm": 53.811004638671875, "learning_rate": 4.3866666666666665e-05, "loss": 1.0059, "step": 2637 }, { "epoch": 21.104, "grad_norm": 64.00930786132812, "learning_rate": 4.3862222222222226e-05, "loss": 1.2781, "step": 2638 }, { "epoch": 21.112, "grad_norm": 22.43989372253418, "learning_rate": 4.385777777777778e-05, "loss": 1.7493, "step": 2639 }, { "epoch": 21.12, "grad_norm": 34.65531921386719, "learning_rate": 4.3853333333333336e-05, "loss": 1.2331, "step": 2640 }, { "epoch": 21.128, "grad_norm": 19.809898376464844, "learning_rate": 4.384888888888889e-05, "loss": 1.3402, "step": 2641 }, { "epoch": 21.136, "grad_norm": 51.054691314697266, "learning_rate": 4.384444444444445e-05, "loss": 1.2064, "step": 2642 }, { "epoch": 21.144, "grad_norm": 40.56196594238281, "learning_rate": 4.384e-05, "loss": 1.2365, "step": 2643 }, { "epoch": 21.152, "grad_norm": 36.361331939697266, "learning_rate": 4.3835555555555555e-05, "loss": 
1.2168, "step": 2644 }, { "epoch": 21.16, "grad_norm": 35.305519104003906, "learning_rate": 4.383111111111111e-05, "loss": 1.1919, "step": 2645 }, { "epoch": 21.168, "grad_norm": 35.01167297363281, "learning_rate": 4.382666666666667e-05, "loss": 1.2493, "step": 2646 }, { "epoch": 21.176, "grad_norm": 24.53581428527832, "learning_rate": 4.3822222222222227e-05, "loss": 1.2482, "step": 2647 }, { "epoch": 21.184, "grad_norm": 42.630218505859375, "learning_rate": 4.381777777777778e-05, "loss": 2.2662, "step": 2648 }, { "epoch": 21.192, "grad_norm": 34.30054473876953, "learning_rate": 4.3813333333333336e-05, "loss": 1.318, "step": 2649 }, { "epoch": 21.2, "grad_norm": 16.568134307861328, "learning_rate": 4.380888888888889e-05, "loss": 1.129, "step": 2650 }, { "epoch": 21.208, "grad_norm": 128.5356903076172, "learning_rate": 4.3804444444444446e-05, "loss": 1.0053, "step": 2651 }, { "epoch": 21.216, "grad_norm": 50.18452835083008, "learning_rate": 4.38e-05, "loss": 1.2832, "step": 2652 }, { "epoch": 21.224, "grad_norm": 36.45592498779297, "learning_rate": 4.3795555555555556e-05, "loss": 1.4171, "step": 2653 }, { "epoch": 21.232, "grad_norm": 30.609113693237305, "learning_rate": 4.379111111111111e-05, "loss": 1.6975, "step": 2654 }, { "epoch": 21.24, "grad_norm": 22.92009925842285, "learning_rate": 4.378666666666667e-05, "loss": 1.3652, "step": 2655 }, { "epoch": 21.248, "grad_norm": 37.76826095581055, "learning_rate": 4.378222222222223e-05, "loss": 1.2116, "step": 2656 }, { "epoch": 21.256, "grad_norm": 36.74630355834961, "learning_rate": 4.377777777777778e-05, "loss": 1.2033, "step": 2657 }, { "epoch": 21.264, "grad_norm": 40.2330322265625, "learning_rate": 4.377333333333333e-05, "loss": 1.2058, "step": 2658 }, { "epoch": 21.272, "grad_norm": 26.51604652404785, "learning_rate": 4.376888888888889e-05, "loss": 1.1518, "step": 2659 }, { "epoch": 21.28, "grad_norm": 28.088111877441406, "learning_rate": 4.3764444444444446e-05, "loss": 1.7473, "step": 2660 }, { "epoch": 21.288, 
"grad_norm": 33.96568298339844, "learning_rate": 4.376e-05, "loss": 1.1631, "step": 2661 }, { "epoch": 21.296, "grad_norm": 37.96345901489258, "learning_rate": 4.3755555555555556e-05, "loss": 1.4127, "step": 2662 }, { "epoch": 21.304, "grad_norm": 38.8945426940918, "learning_rate": 4.375111111111112e-05, "loss": 1.7284, "step": 2663 }, { "epoch": 21.312, "grad_norm": 26.939802169799805, "learning_rate": 4.374666666666667e-05, "loss": 1.3102, "step": 2664 }, { "epoch": 21.32, "grad_norm": 29.50632095336914, "learning_rate": 4.374222222222222e-05, "loss": 1.2061, "step": 2665 }, { "epoch": 21.328, "grad_norm": 28.528030395507812, "learning_rate": 4.3737777777777775e-05, "loss": 1.3324, "step": 2666 }, { "epoch": 21.336, "grad_norm": 21.925235748291016, "learning_rate": 4.373333333333334e-05, "loss": 1.6937, "step": 2667 }, { "epoch": 21.344, "grad_norm": 24.158803939819336, "learning_rate": 4.372888888888889e-05, "loss": 1.541, "step": 2668 }, { "epoch": 21.352, "grad_norm": 30.02572250366211, "learning_rate": 4.372444444444445e-05, "loss": 1.2667, "step": 2669 }, { "epoch": 21.36, "grad_norm": 22.674015045166016, "learning_rate": 4.372e-05, "loss": 1.2065, "step": 2670 }, { "epoch": 21.368, "grad_norm": 35.409732818603516, "learning_rate": 4.3715555555555556e-05, "loss": 1.5493, "step": 2671 }, { "epoch": 21.376, "grad_norm": 53.810909271240234, "learning_rate": 4.371111111111111e-05, "loss": 1.3469, "step": 2672 }, { "epoch": 21.384, "grad_norm": 25.625802993774414, "learning_rate": 4.3706666666666666e-05, "loss": 1.5334, "step": 2673 }, { "epoch": 21.392, "grad_norm": 47.043067932128906, "learning_rate": 4.370222222222222e-05, "loss": 1.2145, "step": 2674 }, { "epoch": 21.4, "grad_norm": 28.25102424621582, "learning_rate": 4.369777777777778e-05, "loss": 1.1925, "step": 2675 }, { "epoch": 21.408, "grad_norm": 28.798946380615234, "learning_rate": 4.369333333333334e-05, "loss": 1.3895, "step": 2676 }, { "epoch": 21.416, "grad_norm": 30.69869041442871, 
"learning_rate": 4.368888888888889e-05, "loss": 1.4228, "step": 2677 }, { "epoch": 21.424, "grad_norm": 16.745756149291992, "learning_rate": 4.368444444444445e-05, "loss": 1.1181, "step": 2678 }, { "epoch": 21.432, "grad_norm": 26.52385902404785, "learning_rate": 4.368e-05, "loss": 1.3317, "step": 2679 }, { "epoch": 21.44, "grad_norm": 22.024568557739258, "learning_rate": 4.367555555555556e-05, "loss": 1.3218, "step": 2680 }, { "epoch": 21.448, "grad_norm": 20.214418411254883, "learning_rate": 4.367111111111111e-05, "loss": 1.2244, "step": 2681 }, { "epoch": 21.456, "grad_norm": 40.8922119140625, "learning_rate": 4.3666666666666666e-05, "loss": 1.211, "step": 2682 }, { "epoch": 21.464, "grad_norm": 16.561613082885742, "learning_rate": 4.366222222222223e-05, "loss": 0.9495, "step": 2683 }, { "epoch": 21.472, "grad_norm": 21.747913360595703, "learning_rate": 4.365777777777778e-05, "loss": 1.5702, "step": 2684 }, { "epoch": 21.48, "grad_norm": 34.73337173461914, "learning_rate": 4.365333333333334e-05, "loss": 0.9796, "step": 2685 }, { "epoch": 21.488, "grad_norm": 75.97407531738281, "learning_rate": 4.3648888888888886e-05, "loss": 3.2854, "step": 2686 }, { "epoch": 21.496, "grad_norm": 26.21042251586914, "learning_rate": 4.364444444444445e-05, "loss": 1.5064, "step": 2687 }, { "epoch": 21.504, "grad_norm": 18.11751365661621, "learning_rate": 4.364e-05, "loss": 2.5186, "step": 2688 }, { "epoch": 21.512, "grad_norm": 151.49276733398438, "learning_rate": 4.363555555555556e-05, "loss": 1.1368, "step": 2689 }, { "epoch": 21.52, "grad_norm": 57.75060272216797, "learning_rate": 4.363111111111111e-05, "loss": 1.4393, "step": 2690 }, { "epoch": 21.528, "grad_norm": 27.08339500427246, "learning_rate": 4.3626666666666674e-05, "loss": 1.3033, "step": 2691 }, { "epoch": 21.536, "grad_norm": 34.05753707885742, "learning_rate": 4.362222222222223e-05, "loss": 1.1153, "step": 2692 }, { "epoch": 21.544, "grad_norm": 18.995132446289062, "learning_rate": 4.3617777777777777e-05, "loss": 
1.4161, "step": 2693 }, { "epoch": 21.552, "grad_norm": 31.091949462890625, "learning_rate": 4.361333333333333e-05, "loss": 1.7377, "step": 2694 }, { "epoch": 21.56, "grad_norm": 26.274852752685547, "learning_rate": 4.360888888888889e-05, "loss": 1.4135, "step": 2695 }, { "epoch": 21.568, "grad_norm": 54.98998260498047, "learning_rate": 4.360444444444445e-05, "loss": 1.4337, "step": 2696 }, { "epoch": 21.576, "grad_norm": 37.39833068847656, "learning_rate": 4.36e-05, "loss": 1.0933, "step": 2697 }, { "epoch": 21.584, "grad_norm": 52.24159622192383, "learning_rate": 4.359555555555556e-05, "loss": 1.5681, "step": 2698 }, { "epoch": 21.592, "grad_norm": 16.789112091064453, "learning_rate": 4.359111111111112e-05, "loss": 1.268, "step": 2699 }, { "epoch": 21.6, "grad_norm": 26.715248107910156, "learning_rate": 4.358666666666667e-05, "loss": 1.3584, "step": 2700 }, { "epoch": 21.608, "grad_norm": 21.28890037536621, "learning_rate": 4.358222222222222e-05, "loss": 1.3124, "step": 2701 }, { "epoch": 21.616, "grad_norm": 21.63428497314453, "learning_rate": 4.357777777777778e-05, "loss": 1.1397, "step": 2702 }, { "epoch": 21.624, "grad_norm": 36.611412048339844, "learning_rate": 4.357333333333333e-05, "loss": 1.1759, "step": 2703 }, { "epoch": 21.632, "grad_norm": 39.835235595703125, "learning_rate": 4.356888888888889e-05, "loss": 1.857, "step": 2704 }, { "epoch": 21.64, "grad_norm": 22.026569366455078, "learning_rate": 4.356444444444445e-05, "loss": 1.0592, "step": 2705 }, { "epoch": 21.648, "grad_norm": 25.271289825439453, "learning_rate": 4.356e-05, "loss": 1.3264, "step": 2706 }, { "epoch": 21.656, "grad_norm": 36.7620735168457, "learning_rate": 4.355555555555556e-05, "loss": 1.5612, "step": 2707 }, { "epoch": 21.664, "grad_norm": 24.944358825683594, "learning_rate": 4.355111111111111e-05, "loss": 1.6069, "step": 2708 }, { "epoch": 21.672, "grad_norm": 23.426164627075195, "learning_rate": 4.354666666666667e-05, "loss": 2.4426, "step": 2709 }, { "epoch": 21.68, 
"grad_norm": 34.1031608581543, "learning_rate": 4.354222222222222e-05, "loss": 1.2481, "step": 2710 }, { "epoch": 21.688, "grad_norm": 22.504301071166992, "learning_rate": 4.353777777777778e-05, "loss": 1.7099, "step": 2711 }, { "epoch": 21.696, "grad_norm": 106.3943099975586, "learning_rate": 4.353333333333334e-05, "loss": 1.4485, "step": 2712 }, { "epoch": 21.704, "grad_norm": 260.87835693359375, "learning_rate": 4.3528888888888894e-05, "loss": 2.0045, "step": 2713 }, { "epoch": 21.712, "grad_norm": 23.3950138092041, "learning_rate": 4.352444444444445e-05, "loss": 2.2014, "step": 2714 }, { "epoch": 21.72, "grad_norm": 21.76860237121582, "learning_rate": 4.352e-05, "loss": 1.3061, "step": 2715 }, { "epoch": 21.728, "grad_norm": 16.790786743164062, "learning_rate": 4.351555555555556e-05, "loss": 1.1833, "step": 2716 }, { "epoch": 21.736, "grad_norm": 32.63910675048828, "learning_rate": 4.351111111111111e-05, "loss": 1.768, "step": 2717 }, { "epoch": 21.744, "grad_norm": 41.49203109741211, "learning_rate": 4.350666666666667e-05, "loss": 1.6187, "step": 2718 }, { "epoch": 21.752, "grad_norm": 56.52021408081055, "learning_rate": 4.350222222222222e-05, "loss": 1.4714, "step": 2719 }, { "epoch": 21.76, "grad_norm": 29.911251068115234, "learning_rate": 4.3497777777777784e-05, "loss": 1.2584, "step": 2720 }, { "epoch": 21.768, "grad_norm": 54.646934509277344, "learning_rate": 4.349333333333334e-05, "loss": 1.3836, "step": 2721 }, { "epoch": 21.776, "grad_norm": 25.017318725585938, "learning_rate": 4.348888888888889e-05, "loss": 1.5376, "step": 2722 }, { "epoch": 21.784, "grad_norm": 182.27938842773438, "learning_rate": 4.348444444444444e-05, "loss": 1.7264, "step": 2723 }, { "epoch": 21.792, "grad_norm": 29.649633407592773, "learning_rate": 4.3480000000000004e-05, "loss": 0.9551, "step": 2724 }, { "epoch": 21.8, "grad_norm": 23.909095764160156, "learning_rate": 4.347555555555556e-05, "loss": 1.3366, "step": 2725 }, { "epoch": 21.808, "grad_norm": 116.08333587646484, 
"learning_rate": 4.3471111111111114e-05, "loss": 1.3645, "step": 2726 }, { "epoch": 21.816, "grad_norm": 40.675716400146484, "learning_rate": 4.346666666666667e-05, "loss": 1.1562, "step": 2727 }, { "epoch": 21.824, "grad_norm": 59.22348403930664, "learning_rate": 4.346222222222222e-05, "loss": 1.2515, "step": 2728 }, { "epoch": 21.832, "grad_norm": 60.22919845581055, "learning_rate": 4.345777777777778e-05, "loss": 1.4272, "step": 2729 }, { "epoch": 21.84, "grad_norm": 27.927270889282227, "learning_rate": 4.345333333333333e-05, "loss": 1.3035, "step": 2730 }, { "epoch": 21.848, "grad_norm": 20.882640838623047, "learning_rate": 4.344888888888889e-05, "loss": 1.202, "step": 2731 }, { "epoch": 21.856, "grad_norm": 40.884464263916016, "learning_rate": 4.344444444444445e-05, "loss": 1.3368, "step": 2732 }, { "epoch": 21.864, "grad_norm": 24.742307662963867, "learning_rate": 4.3440000000000004e-05, "loss": 1.1536, "step": 2733 }, { "epoch": 21.872, "grad_norm": 50.60906982421875, "learning_rate": 4.343555555555556e-05, "loss": 1.2681, "step": 2734 }, { "epoch": 21.88, "grad_norm": 20.58029556274414, "learning_rate": 4.3431111111111114e-05, "loss": 1.4504, "step": 2735 }, { "epoch": 21.888, "grad_norm": 27.629655838012695, "learning_rate": 4.342666666666667e-05, "loss": 0.812, "step": 2736 }, { "epoch": 21.896, "grad_norm": 38.169403076171875, "learning_rate": 4.3422222222222224e-05, "loss": 1.3418, "step": 2737 }, { "epoch": 21.904, "grad_norm": 27.11526870727539, "learning_rate": 4.341777777777778e-05, "loss": 1.2475, "step": 2738 }, { "epoch": 21.912, "grad_norm": 40.81646728515625, "learning_rate": 4.341333333333333e-05, "loss": 1.3158, "step": 2739 }, { "epoch": 21.92, "grad_norm": 23.706207275390625, "learning_rate": 4.3408888888888895e-05, "loss": 1.1998, "step": 2740 }, { "epoch": 21.928, "grad_norm": 40.6219367980957, "learning_rate": 4.340444444444445e-05, "loss": 1.3498, "step": 2741 }, { "epoch": 21.936, "grad_norm": 53.10405731201172, "learning_rate": 
4.3400000000000005e-05, "loss": 1.317, "step": 2742 }, { "epoch": 21.944, "grad_norm": 23.29509925842285, "learning_rate": 4.339555555555555e-05, "loss": 1.2984, "step": 2743 }, { "epoch": 21.951999999999998, "grad_norm": 49.31142044067383, "learning_rate": 4.3391111111111114e-05, "loss": 1.9685, "step": 2744 }, { "epoch": 21.96, "grad_norm": 41.760643005371094, "learning_rate": 4.338666666666667e-05, "loss": 1.131, "step": 2745 }, { "epoch": 21.968, "grad_norm": 23.23260498046875, "learning_rate": 4.3382222222222224e-05, "loss": 1.2236, "step": 2746 }, { "epoch": 21.976, "grad_norm": 38.227439880371094, "learning_rate": 4.337777777777778e-05, "loss": 1.2179, "step": 2747 }, { "epoch": 21.984, "grad_norm": 18.506738662719727, "learning_rate": 4.337333333333334e-05, "loss": 1.7107, "step": 2748 }, { "epoch": 21.992, "grad_norm": 35.556846618652344, "learning_rate": 4.3368888888888895e-05, "loss": 0.9608, "step": 2749 }, { "epoch": 22.0, "grad_norm": 33.74531936645508, "learning_rate": 4.336444444444444e-05, "loss": 0.8594, "step": 2750 }, { "epoch": 22.0, "eval_loss": 1.4305378198623657, "eval_map": 0.2441, "eval_map_50": 0.5284, "eval_map_75": 0.2127, "eval_map_Coverall": 0.4868, "eval_map_Face_Shield": 0.2731, "eval_map_Gloves": 0.1733, "eval_map_Goggles": 0.0812, "eval_map_Mask": 0.206, "eval_map_large": 0.4243, "eval_map_medium": 0.1592, "eval_map_small": 0.1177, "eval_mar_1": 0.2434, "eval_mar_10": 0.4243, "eval_mar_100": 0.4442, "eval_mar_100_Coverall": 0.6356, "eval_mar_100_Face_Shield": 0.5118, "eval_mar_100_Gloves": 0.3377, "eval_mar_100_Goggles": 0.3938, "eval_mar_100_Mask": 0.3423, "eval_mar_large": 0.594, "eval_mar_medium": 0.329, "eval_mar_small": 0.1793, "eval_runtime": 0.9141, "eval_samples_per_second": 31.725, "eval_steps_per_second": 2.188, "step": 2750 }, { "epoch": 22.008, "grad_norm": 31.43215560913086, "learning_rate": 4.336e-05, "loss": 1.4213, "step": 2751 }, { "epoch": 22.016, "grad_norm": 26.872455596923828, "learning_rate": 
4.335555555555556e-05, "loss": 1.3683, "step": 2752 }, { "epoch": 22.024, "grad_norm": 28.442537307739258, "learning_rate": 4.3351111111111115e-05, "loss": 1.25, "step": 2753 }, { "epoch": 22.032, "grad_norm": 51.34507751464844, "learning_rate": 4.334666666666667e-05, "loss": 1.4366, "step": 2754 }, { "epoch": 22.04, "grad_norm": 86.00029754638672, "learning_rate": 4.3342222222222224e-05, "loss": 1.5027, "step": 2755 }, { "epoch": 22.048, "grad_norm": 28.242218017578125, "learning_rate": 4.333777777777778e-05, "loss": 1.3109, "step": 2756 }, { "epoch": 22.056, "grad_norm": 17.262439727783203, "learning_rate": 4.3333333333333334e-05, "loss": 1.0267, "step": 2757 }, { "epoch": 22.064, "grad_norm": 32.908592224121094, "learning_rate": 4.332888888888889e-05, "loss": 1.5732, "step": 2758 }, { "epoch": 22.072, "grad_norm": 21.97365379333496, "learning_rate": 4.3324444444444444e-05, "loss": 1.5317, "step": 2759 }, { "epoch": 22.08, "grad_norm": 44.88530731201172, "learning_rate": 4.332e-05, "loss": 1.1636, "step": 2760 }, { "epoch": 22.088, "grad_norm": 39.91006088256836, "learning_rate": 4.331555555555556e-05, "loss": 1.3759, "step": 2761 }, { "epoch": 22.096, "grad_norm": 21.88585662841797, "learning_rate": 4.3311111111111115e-05, "loss": 1.0312, "step": 2762 }, { "epoch": 22.104, "grad_norm": 46.07307434082031, "learning_rate": 4.330666666666667e-05, "loss": 1.068, "step": 2763 }, { "epoch": 22.112, "grad_norm": 40.449737548828125, "learning_rate": 4.3302222222222225e-05, "loss": 1.5658, "step": 2764 }, { "epoch": 22.12, "grad_norm": 16.245210647583008, "learning_rate": 4.329777777777778e-05, "loss": 1.1649, "step": 2765 }, { "epoch": 22.128, "grad_norm": 20.72629165649414, "learning_rate": 4.3293333333333334e-05, "loss": 1.5767, "step": 2766 }, { "epoch": 22.136, "grad_norm": 56.79990005493164, "learning_rate": 4.328888888888889e-05, "loss": 1.6021, "step": 2767 }, { "epoch": 22.144, "grad_norm": 28.763778686523438, "learning_rate": 4.3284444444444444e-05, "loss": 
1.4251, "step": 2768 }, { "epoch": 22.152, "grad_norm": 18.9632511138916, "learning_rate": 4.3280000000000006e-05, "loss": 1.1506, "step": 2769 }, { "epoch": 22.16, "grad_norm": 70.21590423583984, "learning_rate": 4.327555555555556e-05, "loss": 1.4566, "step": 2770 }, { "epoch": 22.168, "grad_norm": 27.869421005249023, "learning_rate": 4.3271111111111115e-05, "loss": 1.1309, "step": 2771 }, { "epoch": 22.176, "grad_norm": 41.066680908203125, "learning_rate": 4.3266666666666664e-05, "loss": 1.1825, "step": 2772 }, { "epoch": 22.184, "grad_norm": 32.46890640258789, "learning_rate": 4.3262222222222225e-05, "loss": 1.3738, "step": 2773 }, { "epoch": 22.192, "grad_norm": 32.01318359375, "learning_rate": 4.325777777777778e-05, "loss": 1.5219, "step": 2774 }, { "epoch": 22.2, "grad_norm": 29.028514862060547, "learning_rate": 4.3253333333333335e-05, "loss": 1.2834, "step": 2775 }, { "epoch": 22.208, "grad_norm": 68.40060424804688, "learning_rate": 4.324888888888889e-05, "loss": 1.402, "step": 2776 }, { "epoch": 22.216, "grad_norm": 32.79807662963867, "learning_rate": 4.324444444444445e-05, "loss": 1.3274, "step": 2777 }, { "epoch": 22.224, "grad_norm": 25.984222412109375, "learning_rate": 4.324e-05, "loss": 0.912, "step": 2778 }, { "epoch": 22.232, "grad_norm": 33.328712463378906, "learning_rate": 4.3235555555555554e-05, "loss": 1.6547, "step": 2779 }, { "epoch": 22.24, "grad_norm": 24.128164291381836, "learning_rate": 4.323111111111111e-05, "loss": 1.4501, "step": 2780 }, { "epoch": 22.248, "grad_norm": 29.660640716552734, "learning_rate": 4.322666666666667e-05, "loss": 1.6321, "step": 2781 }, { "epoch": 22.256, "grad_norm": 39.60959243774414, "learning_rate": 4.3222222222222226e-05, "loss": 1.3344, "step": 2782 }, { "epoch": 22.264, "grad_norm": 24.13699722290039, "learning_rate": 4.321777777777778e-05, "loss": 1.368, "step": 2783 }, { "epoch": 22.272, "grad_norm": 19.56092643737793, "learning_rate": 4.3213333333333335e-05, "loss": 1.6719, "step": 2784 }, { "epoch": 
22.28, "grad_norm": 41.287349700927734, "learning_rate": 4.320888888888889e-05, "loss": 1.3715, "step": 2785 }, { "epoch": 22.288, "grad_norm": 87.18274688720703, "learning_rate": 4.3204444444444445e-05, "loss": 1.2875, "step": 2786 }, { "epoch": 22.296, "grad_norm": 25.46503257751465, "learning_rate": 4.32e-05, "loss": 2.8203, "step": 2787 }, { "epoch": 22.304, "grad_norm": 24.610380172729492, "learning_rate": 4.3195555555555555e-05, "loss": 1.2863, "step": 2788 }, { "epoch": 22.312, "grad_norm": 83.68943786621094, "learning_rate": 4.3191111111111116e-05, "loss": 1.5445, "step": 2789 }, { "epoch": 22.32, "grad_norm": 30.566295623779297, "learning_rate": 4.318666666666667e-05, "loss": 1.2273, "step": 2790 }, { "epoch": 22.328, "grad_norm": 40.21514129638672, "learning_rate": 4.3182222222222226e-05, "loss": 2.0704, "step": 2791 }, { "epoch": 22.336, "grad_norm": 52.009769439697266, "learning_rate": 4.317777777777778e-05, "loss": 2.7646, "step": 2792 }, { "epoch": 22.344, "grad_norm": 35.182029724121094, "learning_rate": 4.3173333333333336e-05, "loss": 1.4525, "step": 2793 }, { "epoch": 22.352, "grad_norm": 104.3758544921875, "learning_rate": 4.316888888888889e-05, "loss": 1.4772, "step": 2794 }, { "epoch": 22.36, "grad_norm": 54.363468170166016, "learning_rate": 4.3164444444444445e-05, "loss": 1.2549, "step": 2795 }, { "epoch": 22.368, "grad_norm": 208.4532012939453, "learning_rate": 4.316e-05, "loss": 1.224, "step": 2796 }, { "epoch": 22.376, "grad_norm": 27.01045036315918, "learning_rate": 4.315555555555556e-05, "loss": 1.1496, "step": 2797 }, { "epoch": 22.384, "grad_norm": 38.486759185791016, "learning_rate": 4.3151111111111117e-05, "loss": 1.3906, "step": 2798 }, { "epoch": 22.392, "grad_norm": 37.94966506958008, "learning_rate": 4.314666666666667e-05, "loss": 1.6254, "step": 2799 }, { "epoch": 22.4, "grad_norm": 98.37265014648438, "learning_rate": 4.314222222222222e-05, "loss": 1.3842, "step": 2800 }, { "epoch": 22.408, "grad_norm": 39.53353500366211, 
"learning_rate": 4.313777777777778e-05, "loss": 2.2781, "step": 2801 }, { "epoch": 22.416, "grad_norm": 35.65843200683594, "learning_rate": 4.3133333333333336e-05, "loss": 1.1684, "step": 2802 }, { "epoch": 22.424, "grad_norm": 41.286376953125, "learning_rate": 4.312888888888889e-05, "loss": 1.332, "step": 2803 }, { "epoch": 22.432, "grad_norm": 35.27940368652344, "learning_rate": 4.3124444444444446e-05, "loss": 1.0662, "step": 2804 }, { "epoch": 22.44, "grad_norm": 57.36452865600586, "learning_rate": 4.312000000000001e-05, "loss": 1.2082, "step": 2805 }, { "epoch": 22.448, "grad_norm": 36.64564895629883, "learning_rate": 4.311555555555556e-05, "loss": 1.0693, "step": 2806 }, { "epoch": 22.456, "grad_norm": 58.352298736572266, "learning_rate": 4.311111111111111e-05, "loss": 1.103, "step": 2807 }, { "epoch": 22.464, "grad_norm": 22.201953887939453, "learning_rate": 4.3106666666666665e-05, "loss": 1.5102, "step": 2808 }, { "epoch": 22.472, "grad_norm": 38.22883224487305, "learning_rate": 4.310222222222222e-05, "loss": 1.3847, "step": 2809 }, { "epoch": 22.48, "grad_norm": 23.98018455505371, "learning_rate": 4.309777777777778e-05, "loss": 1.766, "step": 2810 }, { "epoch": 22.488, "grad_norm": 24.133268356323242, "learning_rate": 4.3093333333333336e-05, "loss": 1.3924, "step": 2811 }, { "epoch": 22.496, "grad_norm": 180.8636932373047, "learning_rate": 4.308888888888889e-05, "loss": 1.1747, "step": 2812 }, { "epoch": 22.504, "grad_norm": 176.6978759765625, "learning_rate": 4.3084444444444446e-05, "loss": 1.4646, "step": 2813 }, { "epoch": 22.512, "grad_norm": 34.72708511352539, "learning_rate": 4.308e-05, "loss": 1.3098, "step": 2814 }, { "epoch": 22.52, "grad_norm": 30.17572593688965, "learning_rate": 4.3075555555555556e-05, "loss": 1.2086, "step": 2815 }, { "epoch": 22.528, "grad_norm": 23.406658172607422, "learning_rate": 4.307111111111111e-05, "loss": 1.6301, "step": 2816 }, { "epoch": 22.536, "grad_norm": 22.12211799621582, "learning_rate": 4.3066666666666665e-05, 
"loss": 1.4085, "step": 2817 }, { "epoch": 22.544, "grad_norm": 56.34258270263672, "learning_rate": 4.306222222222223e-05, "loss": 1.5593, "step": 2818 }, { "epoch": 22.552, "grad_norm": 52.59615707397461, "learning_rate": 4.305777777777778e-05, "loss": 1.4545, "step": 2819 }, { "epoch": 22.56, "grad_norm": 35.21981430053711, "learning_rate": 4.305333333333334e-05, "loss": 0.9894, "step": 2820 }, { "epoch": 22.568, "grad_norm": 37.49976348876953, "learning_rate": 4.304888888888889e-05, "loss": 1.1309, "step": 2821 }, { "epoch": 22.576, "grad_norm": 31.449024200439453, "learning_rate": 4.3044444444444446e-05, "loss": 1.7203, "step": 2822 }, { "epoch": 22.584, "grad_norm": 62.805763244628906, "learning_rate": 4.304e-05, "loss": 1.3655, "step": 2823 }, { "epoch": 22.592, "grad_norm": 26.10398292541504, "learning_rate": 4.3035555555555556e-05, "loss": 1.356, "step": 2824 }, { "epoch": 22.6, "grad_norm": 24.2155818939209, "learning_rate": 4.303111111111111e-05, "loss": 1.3813, "step": 2825 }, { "epoch": 22.608, "grad_norm": 43.0847053527832, "learning_rate": 4.302666666666667e-05, "loss": 2.0149, "step": 2826 }, { "epoch": 22.616, "grad_norm": 63.30105209350586, "learning_rate": 4.302222222222223e-05, "loss": 1.6483, "step": 2827 }, { "epoch": 22.624, "grad_norm": 35.130374908447266, "learning_rate": 4.301777777777778e-05, "loss": 1.1671, "step": 2828 }, { "epoch": 22.632, "grad_norm": 20.64220428466797, "learning_rate": 4.301333333333333e-05, "loss": 1.4698, "step": 2829 }, { "epoch": 22.64, "grad_norm": 45.38626480102539, "learning_rate": 4.300888888888889e-05, "loss": 1.4174, "step": 2830 }, { "epoch": 22.648, "grad_norm": 51.9398307800293, "learning_rate": 4.300444444444445e-05, "loss": 1.2178, "step": 2831 }, { "epoch": 22.656, "grad_norm": 28.01873016357422, "learning_rate": 4.3e-05, "loss": 2.1592, "step": 2832 }, { "epoch": 22.664, "grad_norm": 34.202083587646484, "learning_rate": 4.2995555555555556e-05, "loss": 2.1569, "step": 2833 }, { "epoch": 22.672, 
"grad_norm": 38.18495559692383, "learning_rate": 4.299111111111112e-05, "loss": 2.0441, "step": 2834 }, { "epoch": 22.68, "grad_norm": 216.3571319580078, "learning_rate": 4.2986666666666666e-05, "loss": 1.1995, "step": 2835 }, { "epoch": 22.688, "grad_norm": 37.13614273071289, "learning_rate": 4.298222222222222e-05, "loss": 1.3267, "step": 2836 }, { "epoch": 22.696, "grad_norm": 27.708372116088867, "learning_rate": 4.2977777777777776e-05, "loss": 1.0263, "step": 2837 }, { "epoch": 22.704, "grad_norm": 47.70653533935547, "learning_rate": 4.297333333333334e-05, "loss": 1.4611, "step": 2838 }, { "epoch": 22.712, "grad_norm": 39.11276626586914, "learning_rate": 4.296888888888889e-05, "loss": 1.2154, "step": 2839 }, { "epoch": 22.72, "grad_norm": 31.50652313232422, "learning_rate": 4.296444444444445e-05, "loss": 1.4035, "step": 2840 }, { "epoch": 22.728, "grad_norm": 28.777786254882812, "learning_rate": 4.296e-05, "loss": 1.0495, "step": 2841 }, { "epoch": 22.736, "grad_norm": 23.138195037841797, "learning_rate": 4.295555555555556e-05, "loss": 1.1199, "step": 2842 }, { "epoch": 22.744, "grad_norm": 41.431827545166016, "learning_rate": 4.295111111111111e-05, "loss": 1.2766, "step": 2843 }, { "epoch": 22.752, "grad_norm": 62.11495590209961, "learning_rate": 4.2946666666666667e-05, "loss": 1.418, "step": 2844 }, { "epoch": 22.76, "grad_norm": 37.01586151123047, "learning_rate": 4.294222222222222e-05, "loss": 1.3332, "step": 2845 }, { "epoch": 22.768, "grad_norm": 55.17631530761719, "learning_rate": 4.293777777777778e-05, "loss": 0.9792, "step": 2846 }, { "epoch": 22.776, "grad_norm": 22.803361892700195, "learning_rate": 4.293333333333334e-05, "loss": 1.4706, "step": 2847 }, { "epoch": 22.784, "grad_norm": 27.10700798034668, "learning_rate": 4.292888888888889e-05, "loss": 1.1649, "step": 2848 }, { "epoch": 22.792, "grad_norm": 32.5339241027832, "learning_rate": 4.292444444444445e-05, "loss": 1.4482, "step": 2849 }, { "epoch": 22.8, "grad_norm": 31.932403564453125, 
"learning_rate": 4.292e-05, "loss": 1.1531, "step": 2850 }, { "epoch": 22.808, "grad_norm": 34.23910903930664, "learning_rate": 4.291555555555556e-05, "loss": 1.2315, "step": 2851 }, { "epoch": 22.816, "grad_norm": 22.181081771850586, "learning_rate": 4.291111111111111e-05, "loss": 1.2864, "step": 2852 }, { "epoch": 22.824, "grad_norm": 29.377113342285156, "learning_rate": 4.290666666666667e-05, "loss": 1.5487, "step": 2853 }, { "epoch": 22.832, "grad_norm": 69.3759765625, "learning_rate": 4.290222222222223e-05, "loss": 1.4872, "step": 2854 }, { "epoch": 22.84, "grad_norm": 40.25068664550781, "learning_rate": 4.2897777777777783e-05, "loss": 1.205, "step": 2855 }, { "epoch": 22.848, "grad_norm": 26.44146728515625, "learning_rate": 4.289333333333334e-05, "loss": 1.1526, "step": 2856 }, { "epoch": 22.856, "grad_norm": 37.89056396484375, "learning_rate": 4.2888888888888886e-05, "loss": 1.4179, "step": 2857 }, { "epoch": 22.864, "grad_norm": 52.030723571777344, "learning_rate": 4.288444444444444e-05, "loss": 1.7589, "step": 2858 }, { "epoch": 22.872, "grad_norm": 33.226078033447266, "learning_rate": 4.288e-05, "loss": 1.1537, "step": 2859 }, { "epoch": 22.88, "grad_norm": 31.583498001098633, "learning_rate": 4.287555555555556e-05, "loss": 1.1357, "step": 2860 }, { "epoch": 22.888, "grad_norm": 38.30719757080078, "learning_rate": 4.287111111111111e-05, "loss": 1.5995, "step": 2861 }, { "epoch": 22.896, "grad_norm": 59.73869323730469, "learning_rate": 4.286666666666667e-05, "loss": 1.2497, "step": 2862 }, { "epoch": 22.904, "grad_norm": 28.40349578857422, "learning_rate": 4.286222222222223e-05, "loss": 0.9379, "step": 2863 }, { "epoch": 22.912, "grad_norm": 44.328121185302734, "learning_rate": 4.285777777777778e-05, "loss": 1.3471, "step": 2864 }, { "epoch": 22.92, "grad_norm": 22.121427536010742, "learning_rate": 4.285333333333333e-05, "loss": 2.2195, "step": 2865 }, { "epoch": 22.928, "grad_norm": 88.30846405029297, "learning_rate": 4.284888888888889e-05, "loss": 
1.2411, "step": 2866 }, { "epoch": 22.936, "grad_norm": 109.37763977050781, "learning_rate": 4.284444444444445e-05, "loss": 1.4905, "step": 2867 }, { "epoch": 22.944, "grad_norm": 60.07933044433594, "learning_rate": 4.284e-05, "loss": 1.6552, "step": 2868 }, { "epoch": 22.951999999999998, "grad_norm": 22.48024559020996, "learning_rate": 4.283555555555556e-05, "loss": 1.2788, "step": 2869 }, { "epoch": 22.96, "grad_norm": 24.697690963745117, "learning_rate": 4.283111111111111e-05, "loss": 1.6591, "step": 2870 }, { "epoch": 22.968, "grad_norm": 36.30435562133789, "learning_rate": 4.282666666666667e-05, "loss": 1.6224, "step": 2871 }, { "epoch": 22.976, "grad_norm": 42.468605041503906, "learning_rate": 4.282222222222222e-05, "loss": 0.8296, "step": 2872 }, { "epoch": 22.984, "grad_norm": 22.65642547607422, "learning_rate": 4.281777777777778e-05, "loss": 1.6038, "step": 2873 }, { "epoch": 22.992, "grad_norm": 45.49708938598633, "learning_rate": 4.281333333333333e-05, "loss": 1.8803, "step": 2874 }, { "epoch": 23.0, "grad_norm": 28.406639099121094, "learning_rate": 4.2808888888888894e-05, "loss": 1.136, "step": 2875 }, { "epoch": 23.0, "eval_loss": 1.426879644393921, "eval_map": 0.2545, "eval_map_50": 0.5494, "eval_map_75": 0.2107, "eval_map_Coverall": 0.4923, "eval_map_Face_Shield": 0.2912, "eval_map_Gloves": 0.1268, "eval_map_Goggles": 0.1224, "eval_map_Mask": 0.2396, "eval_map_large": 0.3576, "eval_map_medium": 0.1592, "eval_map_small": 0.1063, "eval_mar_1": 0.2443, "eval_mar_10": 0.4326, "eval_mar_100": 0.4518, "eval_mar_100_Coverall": 0.6711, "eval_mar_100_Face_Shield": 0.5176, "eval_mar_100_Gloves": 0.3115, "eval_mar_100_Goggles": 0.4281, "eval_mar_100_Mask": 0.3308, "eval_mar_large": 0.6348, "eval_mar_medium": 0.3439, "eval_mar_small": 0.1454, "eval_runtime": 0.8945, "eval_samples_per_second": 32.421, "eval_steps_per_second": 2.236, "step": 2875 }, { "epoch": 23.008, "grad_norm": 24.3584041595459, "learning_rate": 4.280444444444445e-05, "loss": 1.7402, "step": 
2876 }, { "epoch": 23.016, "grad_norm": 22.243328094482422, "learning_rate": 4.2800000000000004e-05, "loss": 1.2854, "step": 2877 }, { "epoch": 23.024, "grad_norm": 37.28318786621094, "learning_rate": 4.279555555555556e-05, "loss": 1.4109, "step": 2878 }, { "epoch": 23.032, "grad_norm": 27.419641494750977, "learning_rate": 4.279111111111111e-05, "loss": 1.493, "step": 2879 }, { "epoch": 23.04, "grad_norm": 50.35676574707031, "learning_rate": 4.278666666666667e-05, "loss": 1.5139, "step": 2880 }, { "epoch": 23.048, "grad_norm": 35.797122955322266, "learning_rate": 4.278222222222222e-05, "loss": 1.2249, "step": 2881 }, { "epoch": 23.056, "grad_norm": 19.66680908203125, "learning_rate": 4.277777777777778e-05, "loss": 1.0372, "step": 2882 }, { "epoch": 23.064, "grad_norm": 88.18250274658203, "learning_rate": 4.277333333333334e-05, "loss": 1.1956, "step": 2883 }, { "epoch": 23.072, "grad_norm": 97.51126098632812, "learning_rate": 4.2768888888888894e-05, "loss": 1.1696, "step": 2884 }, { "epoch": 23.08, "grad_norm": 34.58683395385742, "learning_rate": 4.276444444444445e-05, "loss": 1.2647, "step": 2885 }, { "epoch": 23.088, "grad_norm": 173.92584228515625, "learning_rate": 4.276e-05, "loss": 1.365, "step": 2886 }, { "epoch": 23.096, "grad_norm": 42.72846221923828, "learning_rate": 4.275555555555556e-05, "loss": 1.2647, "step": 2887 }, { "epoch": 23.104, "grad_norm": 15.68558120727539, "learning_rate": 4.2751111111111114e-05, "loss": 1.2841, "step": 2888 }, { "epoch": 23.112, "grad_norm": 26.36305809020996, "learning_rate": 4.274666666666667e-05, "loss": 1.3896, "step": 2889 }, { "epoch": 23.12, "grad_norm": 19.67698097229004, "learning_rate": 4.274222222222222e-05, "loss": 1.0264, "step": 2890 }, { "epoch": 23.128, "grad_norm": 51.790618896484375, "learning_rate": 4.2737777777777785e-05, "loss": 1.3786, "step": 2891 }, { "epoch": 23.136, "grad_norm": 35.11339569091797, "learning_rate": 4.273333333333333e-05, "loss": 1.3239, "step": 2892 }, { "epoch": 23.144, "grad_norm": 
41.2672233581543, "learning_rate": 4.272888888888889e-05, "loss": 1.0458, "step": 2893 }, { "epoch": 23.152, "grad_norm": 18.65095329284668, "learning_rate": 4.272444444444444e-05, "loss": 1.3194, "step": 2894 }, { "epoch": 23.16, "grad_norm": 29.88551139831543, "learning_rate": 4.2720000000000004e-05, "loss": 1.2467, "step": 2895 }, { "epoch": 23.168, "grad_norm": 30.541276931762695, "learning_rate": 4.271555555555556e-05, "loss": 1.6122, "step": 2896 }, { "epoch": 23.176, "grad_norm": 35.09401321411133, "learning_rate": 4.2711111111111114e-05, "loss": 1.3133, "step": 2897 }, { "epoch": 23.184, "grad_norm": 18.217607498168945, "learning_rate": 4.270666666666667e-05, "loss": 1.2685, "step": 2898 }, { "epoch": 23.192, "grad_norm": 49.91832733154297, "learning_rate": 4.2702222222222224e-05, "loss": 1.2105, "step": 2899 }, { "epoch": 23.2, "grad_norm": 21.378429412841797, "learning_rate": 4.269777777777778e-05, "loss": 1.0096, "step": 2900 }, { "epoch": 23.208, "grad_norm": 41.23004913330078, "learning_rate": 4.2693333333333333e-05, "loss": 1.1758, "step": 2901 }, { "epoch": 23.216, "grad_norm": 46.588199615478516, "learning_rate": 4.268888888888889e-05, "loss": 1.5408, "step": 2902 }, { "epoch": 23.224, "grad_norm": 25.298006057739258, "learning_rate": 4.268444444444445e-05, "loss": 1.3381, "step": 2903 }, { "epoch": 23.232, "grad_norm": 48.0561637878418, "learning_rate": 4.2680000000000005e-05, "loss": 1.236, "step": 2904 }, { "epoch": 23.24, "grad_norm": 19.531396865844727, "learning_rate": 4.267555555555556e-05, "loss": 1.2672, "step": 2905 }, { "epoch": 23.248, "grad_norm": 28.723922729492188, "learning_rate": 4.2671111111111114e-05, "loss": 1.2571, "step": 2906 }, { "epoch": 23.256, "grad_norm": 28.517681121826172, "learning_rate": 4.266666666666667e-05, "loss": 1.5806, "step": 2907 }, { "epoch": 23.264, "grad_norm": 165.07591247558594, "learning_rate": 4.2662222222222224e-05, "loss": 1.3183, "step": 2908 }, { "epoch": 23.272, "grad_norm": 60.290470123291016, 
"learning_rate": 4.265777777777778e-05, "loss": 1.3607, "step": 2909 }, { "epoch": 23.28, "grad_norm": 28.12940788269043, "learning_rate": 4.2653333333333334e-05, "loss": 1.2386, "step": 2910 }, { "epoch": 23.288, "grad_norm": 37.42744827270508, "learning_rate": 4.264888888888889e-05, "loss": 3.1895, "step": 2911 }, { "epoch": 23.296, "grad_norm": 73.17342376708984, "learning_rate": 4.264444444444445e-05, "loss": 1.5568, "step": 2912 }, { "epoch": 23.304, "grad_norm": 24.18225860595703, "learning_rate": 4.2640000000000005e-05, "loss": 1.8327, "step": 2913 }, { "epoch": 23.312, "grad_norm": 38.554447174072266, "learning_rate": 4.263555555555555e-05, "loss": 1.3886, "step": 2914 }, { "epoch": 23.32, "grad_norm": 35.32598114013672, "learning_rate": 4.263111111111111e-05, "loss": 1.3255, "step": 2915 }, { "epoch": 23.328, "grad_norm": 32.169708251953125, "learning_rate": 4.262666666666667e-05, "loss": 1.144, "step": 2916 }, { "epoch": 23.336, "grad_norm": 35.39574432373047, "learning_rate": 4.2622222222222224e-05, "loss": 1.1836, "step": 2917 }, { "epoch": 23.344, "grad_norm": 26.3586483001709, "learning_rate": 4.261777777777778e-05, "loss": 1.2391, "step": 2918 }, { "epoch": 23.352, "grad_norm": 17.192609786987305, "learning_rate": 4.2613333333333334e-05, "loss": 1.1967, "step": 2919 }, { "epoch": 23.36, "grad_norm": 27.986133575439453, "learning_rate": 4.2608888888888896e-05, "loss": 0.8718, "step": 2920 }, { "epoch": 23.368, "grad_norm": 84.24005126953125, "learning_rate": 4.2604444444444444e-05, "loss": 1.4112, "step": 2921 }, { "epoch": 23.376, "grad_norm": 23.028053283691406, "learning_rate": 4.26e-05, "loss": 1.3479, "step": 2922 }, { "epoch": 23.384, "grad_norm": 23.6407413482666, "learning_rate": 4.2595555555555554e-05, "loss": 1.6202, "step": 2923 }, { "epoch": 23.392, "grad_norm": 58.601505279541016, "learning_rate": 4.2591111111111115e-05, "loss": 1.4288, "step": 2924 }, { "epoch": 23.4, "grad_norm": 60.13990020751953, "learning_rate": 
4.258666666666667e-05, "loss": 0.8238, "step": 2925 }, { "epoch": 23.408, "grad_norm": 43.55455780029297, "learning_rate": 4.2582222222222225e-05, "loss": 1.3908, "step": 2926 }, { "epoch": 23.416, "grad_norm": 32.08633804321289, "learning_rate": 4.257777777777778e-05, "loss": 1.763, "step": 2927 }, { "epoch": 23.424, "grad_norm": 159.6870880126953, "learning_rate": 4.2573333333333335e-05, "loss": 1.0565, "step": 2928 }, { "epoch": 23.432, "grad_norm": 22.954593658447266, "learning_rate": 4.256888888888889e-05, "loss": 1.5872, "step": 2929 }, { "epoch": 23.44, "grad_norm": 49.24017333984375, "learning_rate": 4.2564444444444444e-05, "loss": 1.3426, "step": 2930 }, { "epoch": 23.448, "grad_norm": 94.69576263427734, "learning_rate": 4.256e-05, "loss": 1.5356, "step": 2931 }, { "epoch": 23.456, "grad_norm": 21.771650314331055, "learning_rate": 4.255555555555556e-05, "loss": 1.107, "step": 2932 }, { "epoch": 23.464, "grad_norm": 30.64766502380371, "learning_rate": 4.2551111111111116e-05, "loss": 1.3441, "step": 2933 }, { "epoch": 23.472, "grad_norm": 35.48822021484375, "learning_rate": 4.254666666666667e-05, "loss": 1.2908, "step": 2934 }, { "epoch": 23.48, "grad_norm": 36.063804626464844, "learning_rate": 4.2542222222222225e-05, "loss": 1.4697, "step": 2935 }, { "epoch": 23.488, "grad_norm": 66.49272155761719, "learning_rate": 4.253777777777778e-05, "loss": 1.5023, "step": 2936 }, { "epoch": 23.496, "grad_norm": 23.0269775390625, "learning_rate": 4.2533333333333335e-05, "loss": 1.4059, "step": 2937 }, { "epoch": 23.504, "grad_norm": 19.69516372680664, "learning_rate": 4.252888888888889e-05, "loss": 1.3347, "step": 2938 }, { "epoch": 23.512, "grad_norm": 23.674070358276367, "learning_rate": 4.2524444444444445e-05, "loss": 1.1768, "step": 2939 }, { "epoch": 23.52, "grad_norm": 41.24112319946289, "learning_rate": 4.2520000000000006e-05, "loss": 1.3297, "step": 2940 }, { "epoch": 23.528, "grad_norm": 46.54228210449219, "learning_rate": 4.251555555555556e-05, "loss": 1.151, 
"step": 2941 }, { "epoch": 23.536, "grad_norm": 27.738107681274414, "learning_rate": 4.2511111111111116e-05, "loss": 1.3835, "step": 2942 }, { "epoch": 23.544, "grad_norm": 41.283206939697266, "learning_rate": 4.2506666666666664e-05, "loss": 2.3503, "step": 2943 }, { "epoch": 23.552, "grad_norm": 22.45799446105957, "learning_rate": 4.2502222222222226e-05, "loss": 1.3868, "step": 2944 }, { "epoch": 23.56, "grad_norm": 22.102813720703125, "learning_rate": 4.249777777777778e-05, "loss": 1.1406, "step": 2945 }, { "epoch": 23.568, "grad_norm": 59.89744567871094, "learning_rate": 4.2493333333333335e-05, "loss": 1.4156, "step": 2946 }, { "epoch": 23.576, "grad_norm": 41.377227783203125, "learning_rate": 4.248888888888889e-05, "loss": 0.9968, "step": 2947 }, { "epoch": 23.584, "grad_norm": 30.73954963684082, "learning_rate": 4.248444444444445e-05, "loss": 1.5384, "step": 2948 }, { "epoch": 23.592, "grad_norm": 25.77431869506836, "learning_rate": 4.248e-05, "loss": 1.6359, "step": 2949 }, { "epoch": 23.6, "grad_norm": 34.0968017578125, "learning_rate": 4.2475555555555555e-05, "loss": 1.2055, "step": 2950 }, { "epoch": 23.608, "grad_norm": 64.58131408691406, "learning_rate": 4.247111111111111e-05, "loss": 1.1266, "step": 2951 }, { "epoch": 23.616, "grad_norm": 29.216283798217773, "learning_rate": 4.246666666666667e-05, "loss": 0.986, "step": 2952 }, { "epoch": 23.624, "grad_norm": 53.06088638305664, "learning_rate": 4.2462222222222226e-05, "loss": 1.2859, "step": 2953 }, { "epoch": 23.632, "grad_norm": 56.91218566894531, "learning_rate": 4.245777777777778e-05, "loss": 1.3643, "step": 2954 }, { "epoch": 23.64, "grad_norm": 50.16166305541992, "learning_rate": 4.2453333333333336e-05, "loss": 1.6008, "step": 2955 }, { "epoch": 23.648, "grad_norm": 45.32609939575195, "learning_rate": 4.244888888888889e-05, "loss": 1.9001, "step": 2956 }, { "epoch": 23.656, "grad_norm": 40.368202209472656, "learning_rate": 4.2444444444444445e-05, "loss": 1.9037, "step": 2957 }, { "epoch": 23.664, 
"grad_norm": 30.845182418823242, "learning_rate": 4.244e-05, "loss": 1.109, "step": 2958 }, { "epoch": 23.672, "grad_norm": 22.96727180480957, "learning_rate": 4.2435555555555555e-05, "loss": 1.0087, "step": 2959 }, { "epoch": 23.68, "grad_norm": 26.444150924682617, "learning_rate": 4.243111111111111e-05, "loss": 1.5878, "step": 2960 }, { "epoch": 23.688, "grad_norm": 26.811391830444336, "learning_rate": 4.242666666666667e-05, "loss": 1.1582, "step": 2961 }, { "epoch": 23.696, "grad_norm": 27.075162887573242, "learning_rate": 4.2422222222222226e-05, "loss": 1.5827, "step": 2962 }, { "epoch": 23.704, "grad_norm": 49.69591522216797, "learning_rate": 4.241777777777778e-05, "loss": 2.919, "step": 2963 }, { "epoch": 23.712, "grad_norm": 21.993898391723633, "learning_rate": 4.241333333333333e-05, "loss": 1.4684, "step": 2964 }, { "epoch": 23.72, "grad_norm": 43.80036163330078, "learning_rate": 4.240888888888889e-05, "loss": 1.5022, "step": 2965 }, { "epoch": 23.728, "grad_norm": 85.42980194091797, "learning_rate": 4.2404444444444446e-05, "loss": 1.59, "step": 2966 }, { "epoch": 23.736, "grad_norm": 32.12169647216797, "learning_rate": 4.24e-05, "loss": 1.515, "step": 2967 }, { "epoch": 23.744, "grad_norm": 19.587308883666992, "learning_rate": 4.2395555555555555e-05, "loss": 1.4391, "step": 2968 }, { "epoch": 23.752, "grad_norm": 37.64113235473633, "learning_rate": 4.239111111111112e-05, "loss": 1.1714, "step": 2969 }, { "epoch": 23.76, "grad_norm": 31.81230354309082, "learning_rate": 4.238666666666667e-05, "loss": 2.5214, "step": 2970 }, { "epoch": 23.768, "grad_norm": 37.178653717041016, "learning_rate": 4.238222222222222e-05, "loss": 1.1149, "step": 2971 }, { "epoch": 23.776, "grad_norm": 27.118181228637695, "learning_rate": 4.2377777777777775e-05, "loss": 1.0068, "step": 2972 }, { "epoch": 23.784, "grad_norm": 53.588706970214844, "learning_rate": 4.2373333333333336e-05, "loss": 1.5822, "step": 2973 }, { "epoch": 23.792, "grad_norm": 29.434812545776367, "learning_rate": 
4.236888888888889e-05, "loss": 1.2479, "step": 2974 }, { "epoch": 23.8, "grad_norm": 35.817813873291016, "learning_rate": 4.2364444444444446e-05, "loss": 1.77, "step": 2975 }, { "epoch": 23.808, "grad_norm": 24.325729370117188, "learning_rate": 4.236e-05, "loss": 1.312, "step": 2976 }, { "epoch": 23.816, "grad_norm": 44.020103454589844, "learning_rate": 4.235555555555556e-05, "loss": 1.312, "step": 2977 }, { "epoch": 23.824, "grad_norm": 26.931943893432617, "learning_rate": 4.235111111111111e-05, "loss": 1.1348, "step": 2978 }, { "epoch": 23.832, "grad_norm": 19.85508918762207, "learning_rate": 4.2346666666666666e-05, "loss": 1.3824, "step": 2979 }, { "epoch": 23.84, "grad_norm": 26.24737548828125, "learning_rate": 4.234222222222222e-05, "loss": 1.3983, "step": 2980 }, { "epoch": 23.848, "grad_norm": 24.559906005859375, "learning_rate": 4.233777777777778e-05, "loss": 0.996, "step": 2981 }, { "epoch": 23.856, "grad_norm": 29.963302612304688, "learning_rate": 4.233333333333334e-05, "loss": 2.2669, "step": 2982 }, { "epoch": 23.864, "grad_norm": 31.904552459716797, "learning_rate": 4.232888888888889e-05, "loss": 1.1784, "step": 2983 }, { "epoch": 23.872, "grad_norm": 74.00634002685547, "learning_rate": 4.2324444444444447e-05, "loss": 0.9337, "step": 2984 }, { "epoch": 23.88, "grad_norm": 49.25193405151367, "learning_rate": 4.232e-05, "loss": 1.2058, "step": 2985 }, { "epoch": 23.888, "grad_norm": 33.64456558227539, "learning_rate": 4.2315555555555556e-05, "loss": 1.4605, "step": 2986 }, { "epoch": 23.896, "grad_norm": 94.32756042480469, "learning_rate": 4.231111111111111e-05, "loss": 1.2177, "step": 2987 }, { "epoch": 23.904, "grad_norm": 25.58683967590332, "learning_rate": 4.2306666666666666e-05, "loss": 1.5504, "step": 2988 }, { "epoch": 23.912, "grad_norm": 140.4877166748047, "learning_rate": 4.230222222222223e-05, "loss": 1.4128, "step": 2989 }, { "epoch": 23.92, "grad_norm": 31.464548110961914, "learning_rate": 4.229777777777778e-05, "loss": 1.3235, "step": 2990 
}, { "epoch": 23.928, "grad_norm": 30.060958862304688, "learning_rate": 4.229333333333334e-05, "loss": 1.1965, "step": 2991 }, { "epoch": 23.936, "grad_norm": 17.1815185546875, "learning_rate": 4.228888888888889e-05, "loss": 1.2943, "step": 2992 }, { "epoch": 23.944, "grad_norm": 18.863731384277344, "learning_rate": 4.228444444444445e-05, "loss": 1.1558, "step": 2993 }, { "epoch": 23.951999999999998, "grad_norm": 27.387187957763672, "learning_rate": 4.228e-05, "loss": 1.3847, "step": 2994 }, { "epoch": 23.96, "grad_norm": 37.79708480834961, "learning_rate": 4.227555555555556e-05, "loss": 1.6565, "step": 2995 }, { "epoch": 23.968, "grad_norm": 17.03190040588379, "learning_rate": 4.227111111111111e-05, "loss": 1.2522, "step": 2996 }, { "epoch": 23.976, "grad_norm": 27.900890350341797, "learning_rate": 4.226666666666667e-05, "loss": 1.2035, "step": 2997 }, { "epoch": 23.984, "grad_norm": 69.09425354003906, "learning_rate": 4.226222222222223e-05, "loss": 1.5036, "step": 2998 }, { "epoch": 23.992, "grad_norm": 23.717960357666016, "learning_rate": 4.225777777777778e-05, "loss": 1.6326, "step": 2999 }, { "epoch": 24.0, "grad_norm": 25.500289916992188, "learning_rate": 4.225333333333333e-05, "loss": 1.5275, "step": 3000 }, { "epoch": 24.0, "eval_loss": 1.7538883686065674, "eval_map": 0.1791, "eval_map_50": 0.3997, "eval_map_75": 0.1484, "eval_map_Coverall": 0.3991, "eval_map_Face_Shield": 0.2215, "eval_map_Gloves": 0.1019, "eval_map_Goggles": 0.0533, "eval_map_Mask": 0.1195, "eval_map_large": 0.3143, "eval_map_medium": 0.1095, "eval_map_small": 0.0453, "eval_mar_1": 0.2014, "eval_mar_10": 0.364, "eval_mar_100": 0.3931, "eval_mar_100_Coverall": 0.5911, "eval_mar_100_Face_Shield": 0.5294, "eval_mar_100_Gloves": 0.2803, "eval_mar_100_Goggles": 0.3031, "eval_mar_100_Mask": 0.2615, "eval_mar_large": 0.6157, "eval_mar_medium": 0.285, "eval_mar_small": 0.0785, "eval_runtime": 0.9117, "eval_samples_per_second": 31.809, "eval_steps_per_second": 2.194, "step": 3000 }, { "epoch": 
24.008, "grad_norm": 30.53757667541504, "learning_rate": 4.224888888888889e-05, "loss": 1.2936, "step": 3001 }, { "epoch": 24.016, "grad_norm": 29.03043556213379, "learning_rate": 4.224444444444445e-05, "loss": 1.2842, "step": 3002 }, { "epoch": 24.024, "grad_norm": 29.42542839050293, "learning_rate": 4.224e-05, "loss": 1.7681, "step": 3003 }, { "epoch": 24.032, "grad_norm": 31.240083694458008, "learning_rate": 4.223555555555556e-05, "loss": 1.1423, "step": 3004 }, { "epoch": 24.04, "grad_norm": 21.376747131347656, "learning_rate": 4.223111111111112e-05, "loss": 1.216, "step": 3005 }, { "epoch": 24.048, "grad_norm": 23.96097183227539, "learning_rate": 4.222666666666667e-05, "loss": 2.1612, "step": 3006 }, { "epoch": 24.056, "grad_norm": 31.648305892944336, "learning_rate": 4.222222222222222e-05, "loss": 0.7784, "step": 3007 }, { "epoch": 24.064, "grad_norm": 23.23834991455078, "learning_rate": 4.2217777777777776e-05, "loss": 1.1826, "step": 3008 }, { "epoch": 24.072, "grad_norm": 70.57278442382812, "learning_rate": 4.221333333333334e-05, "loss": 1.3666, "step": 3009 }, { "epoch": 24.08, "grad_norm": 24.407325744628906, "learning_rate": 4.220888888888889e-05, "loss": 1.4038, "step": 3010 }, { "epoch": 24.088, "grad_norm": 27.701519012451172, "learning_rate": 4.220444444444445e-05, "loss": 1.6169, "step": 3011 }, { "epoch": 24.096, "grad_norm": 12.385852813720703, "learning_rate": 4.22e-05, "loss": 1.693, "step": 3012 }, { "epoch": 24.104, "grad_norm": 16.432167053222656, "learning_rate": 4.219555555555556e-05, "loss": 1.0515, "step": 3013 }, { "epoch": 24.112, "grad_norm": 18.28042984008789, "learning_rate": 4.219111111111111e-05, "loss": 1.5518, "step": 3014 }, { "epoch": 24.12, "grad_norm": 32.31135559082031, "learning_rate": 4.218666666666667e-05, "loss": 0.9704, "step": 3015 }, { "epoch": 24.128, "grad_norm": 35.653743743896484, "learning_rate": 4.218222222222222e-05, "loss": 1.3602, "step": 3016 }, { "epoch": 24.136, "grad_norm": 110.15170288085938, 
"learning_rate": 4.217777777777778e-05, "loss": 2.028, "step": 3017 }, { "epoch": 24.144, "grad_norm": 25.60778045654297, "learning_rate": 4.217333333333334e-05, "loss": 1.1368, "step": 3018 }, { "epoch": 24.152, "grad_norm": 41.788150787353516, "learning_rate": 4.216888888888889e-05, "loss": 1.2262, "step": 3019 }, { "epoch": 24.16, "grad_norm": 78.10823059082031, "learning_rate": 4.216444444444445e-05, "loss": 1.2717, "step": 3020 }, { "epoch": 24.168, "grad_norm": 17.574148178100586, "learning_rate": 4.2159999999999996e-05, "loss": 1.363, "step": 3021 }, { "epoch": 24.176, "grad_norm": 57.226436614990234, "learning_rate": 4.215555555555556e-05, "loss": 1.6007, "step": 3022 }, { "epoch": 24.184, "grad_norm": 34.77672576904297, "learning_rate": 4.215111111111111e-05, "loss": 1.2413, "step": 3023 }, { "epoch": 24.192, "grad_norm": 20.687685012817383, "learning_rate": 4.214666666666667e-05, "loss": 2.3608, "step": 3024 }, { "epoch": 24.2, "grad_norm": 69.70166015625, "learning_rate": 4.214222222222222e-05, "loss": 0.8771, "step": 3025 }, { "epoch": 24.208, "grad_norm": 30.074554443359375, "learning_rate": 4.2137777777777784e-05, "loss": 1.2621, "step": 3026 }, { "epoch": 24.216, "grad_norm": 78.96143341064453, "learning_rate": 4.213333333333334e-05, "loss": 1.0616, "step": 3027 }, { "epoch": 24.224, "grad_norm": 26.725107192993164, "learning_rate": 4.212888888888889e-05, "loss": 1.1663, "step": 3028 }, { "epoch": 24.232, "grad_norm": 30.855514526367188, "learning_rate": 4.212444444444444e-05, "loss": 1.2241, "step": 3029 }, { "epoch": 24.24, "grad_norm": 23.16865348815918, "learning_rate": 4.212e-05, "loss": 1.2713, "step": 3030 }, { "epoch": 24.248, "grad_norm": 27.489479064941406, "learning_rate": 4.211555555555556e-05, "loss": 1.9661, "step": 3031 }, { "epoch": 24.256, "grad_norm": 51.35514450073242, "learning_rate": 4.211111111111111e-05, "loss": 0.9918, "step": 3032 }, { "epoch": 24.264, "grad_norm": 24.25865364074707, "learning_rate": 4.210666666666667e-05, 
"loss": 1.3718, "step": 3033 }, { "epoch": 24.272, "grad_norm": 27.03205680847168, "learning_rate": 4.210222222222223e-05, "loss": 1.343, "step": 3034 }, { "epoch": 24.28, "grad_norm": 207.77012634277344, "learning_rate": 4.209777777777778e-05, "loss": 1.0419, "step": 3035 }, { "epoch": 24.288, "grad_norm": 20.730575561523438, "learning_rate": 4.209333333333333e-05, "loss": 1.2155, "step": 3036 }, { "epoch": 24.296, "grad_norm": 20.054704666137695, "learning_rate": 4.208888888888889e-05, "loss": 0.8918, "step": 3037 }, { "epoch": 24.304, "grad_norm": 39.16994857788086, "learning_rate": 4.208444444444445e-05, "loss": 1.4805, "step": 3038 }, { "epoch": 24.312, "grad_norm": 42.303043365478516, "learning_rate": 4.2080000000000004e-05, "loss": 2.14, "step": 3039 }, { "epoch": 24.32, "grad_norm": 296.9369812011719, "learning_rate": 4.207555555555556e-05, "loss": 1.097, "step": 3040 }, { "epoch": 24.328, "grad_norm": 24.532041549682617, "learning_rate": 4.207111111111111e-05, "loss": 1.2273, "step": 3041 }, { "epoch": 24.336, "grad_norm": 33.346195220947266, "learning_rate": 4.206666666666667e-05, "loss": 1.7404, "step": 3042 }, { "epoch": 24.344, "grad_norm": 20.540672302246094, "learning_rate": 4.206222222222222e-05, "loss": 1.1038, "step": 3043 }, { "epoch": 24.352, "grad_norm": 30.066415786743164, "learning_rate": 4.205777777777778e-05, "loss": 1.1888, "step": 3044 }, { "epoch": 24.36, "grad_norm": 17.23876190185547, "learning_rate": 4.205333333333333e-05, "loss": 1.3685, "step": 3045 }, { "epoch": 24.368, "grad_norm": 21.205730438232422, "learning_rate": 4.2048888888888894e-05, "loss": 1.5853, "step": 3046 }, { "epoch": 24.376, "grad_norm": 32.190818786621094, "learning_rate": 4.204444444444445e-05, "loss": 1.1309, "step": 3047 }, { "epoch": 24.384, "grad_norm": 65.33296966552734, "learning_rate": 4.2040000000000004e-05, "loss": 1.2673, "step": 3048 }, { "epoch": 24.392, "grad_norm": 42.8679313659668, "learning_rate": 4.203555555555556e-05, "loss": 2.8134, "step": 
3049 }, { "epoch": 24.4, "grad_norm": 30.769777297973633, "learning_rate": 4.2031111111111114e-05, "loss": 1.4568, "step": 3050 }, { "epoch": 24.408, "grad_norm": 65.76930236816406, "learning_rate": 4.202666666666667e-05, "loss": 1.1741, "step": 3051 }, { "epoch": 24.416, "grad_norm": 29.096717834472656, "learning_rate": 4.2022222222222223e-05, "loss": 0.9982, "step": 3052 }, { "epoch": 24.424, "grad_norm": 27.09832000732422, "learning_rate": 4.201777777777778e-05, "loss": 1.3103, "step": 3053 }, { "epoch": 24.432, "grad_norm": 28.058958053588867, "learning_rate": 4.201333333333334e-05, "loss": 1.3173, "step": 3054 }, { "epoch": 24.44, "grad_norm": 20.112010955810547, "learning_rate": 4.2008888888888895e-05, "loss": 1.3645, "step": 3055 }, { "epoch": 24.448, "grad_norm": 24.214136123657227, "learning_rate": 4.200444444444445e-05, "loss": 1.324, "step": 3056 }, { "epoch": 24.456, "grad_norm": 30.59940528869629, "learning_rate": 4.2e-05, "loss": 1.2979, "step": 3057 }, { "epoch": 24.464, "grad_norm": 29.125938415527344, "learning_rate": 4.199555555555556e-05, "loss": 1.4026, "step": 3058 }, { "epoch": 24.472, "grad_norm": 51.969520568847656, "learning_rate": 4.1991111111111114e-05, "loss": 1.3761, "step": 3059 }, { "epoch": 24.48, "grad_norm": 25.469322204589844, "learning_rate": 4.198666666666667e-05, "loss": 0.9745, "step": 3060 }, { "epoch": 24.488, "grad_norm": 39.12543869018555, "learning_rate": 4.1982222222222224e-05, "loss": 1.7586, "step": 3061 }, { "epoch": 24.496, "grad_norm": 20.09779930114746, "learning_rate": 4.1977777777777785e-05, "loss": 1.5976, "step": 3062 }, { "epoch": 24.504, "grad_norm": 53.273189544677734, "learning_rate": 4.1973333333333334e-05, "loss": 1.2838, "step": 3063 }, { "epoch": 24.512, "grad_norm": 26.900390625, "learning_rate": 4.196888888888889e-05, "loss": 1.8119, "step": 3064 }, { "epoch": 24.52, "grad_norm": 54.46805191040039, "learning_rate": 4.196444444444444e-05, "loss": 1.636, "step": 3065 }, { "epoch": 24.528, "grad_norm": 
45.522762298583984, "learning_rate": 4.196e-05, "loss": 0.9311, "step": 3066 }, { "epoch": 24.536, "grad_norm": 34.57870864868164, "learning_rate": 4.195555555555556e-05, "loss": 1.0918, "step": 3067 }, { "epoch": 24.544, "grad_norm": 29.30522918701172, "learning_rate": 4.1951111111111115e-05, "loss": 1.2917, "step": 3068 }, { "epoch": 24.552, "grad_norm": 42.211158752441406, "learning_rate": 4.194666666666667e-05, "loss": 1.5592, "step": 3069 }, { "epoch": 24.56, "grad_norm": 29.553022384643555, "learning_rate": 4.1942222222222224e-05, "loss": 1.5242, "step": 3070 }, { "epoch": 24.568, "grad_norm": 32.05529022216797, "learning_rate": 4.193777777777778e-05, "loss": 1.8001, "step": 3071 }, { "epoch": 24.576, "grad_norm": 49.49238586425781, "learning_rate": 4.1933333333333334e-05, "loss": 1.3396, "step": 3072 }, { "epoch": 24.584, "grad_norm": 16.964765548706055, "learning_rate": 4.192888888888889e-05, "loss": 2.1618, "step": 3073 }, { "epoch": 24.592, "grad_norm": 50.44853210449219, "learning_rate": 4.1924444444444444e-05, "loss": 1.2668, "step": 3074 }, { "epoch": 24.6, "grad_norm": 70.78838348388672, "learning_rate": 4.1920000000000005e-05, "loss": 1.133, "step": 3075 }, { "epoch": 24.608, "grad_norm": 37.84104919433594, "learning_rate": 4.191555555555556e-05, "loss": 1.6483, "step": 3076 }, { "epoch": 24.616, "grad_norm": 30.39556884765625, "learning_rate": 4.1911111111111115e-05, "loss": 1.733, "step": 3077 }, { "epoch": 24.624, "grad_norm": 25.984920501708984, "learning_rate": 4.190666666666666e-05, "loss": 1.0364, "step": 3078 }, { "epoch": 24.632, "grad_norm": 32.91425704956055, "learning_rate": 4.1902222222222225e-05, "loss": 1.995, "step": 3079 }, { "epoch": 24.64, "grad_norm": 80.69921112060547, "learning_rate": 4.189777777777778e-05, "loss": 1.4837, "step": 3080 }, { "epoch": 24.648, "grad_norm": 43.84510040283203, "learning_rate": 4.1893333333333334e-05, "loss": 1.7279, "step": 3081 }, { "epoch": 24.656, "grad_norm": 26.602773666381836, "learning_rate": 
4.188888888888889e-05, "loss": 1.345, "step": 3082 }, { "epoch": 24.664, "grad_norm": 22.45857810974121, "learning_rate": 4.188444444444445e-05, "loss": 1.489, "step": 3083 }, { "epoch": 24.672, "grad_norm": 27.386098861694336, "learning_rate": 4.1880000000000006e-05, "loss": 1.6811, "step": 3084 }, { "epoch": 24.68, "grad_norm": 102.18565368652344, "learning_rate": 4.1875555555555554e-05, "loss": 1.1906, "step": 3085 }, { "epoch": 24.688, "grad_norm": 30.743906021118164, "learning_rate": 4.187111111111111e-05, "loss": 1.3814, "step": 3086 }, { "epoch": 24.696, "grad_norm": 21.999788284301758, "learning_rate": 4.186666666666667e-05, "loss": 1.574, "step": 3087 }, { "epoch": 24.704, "grad_norm": 18.676450729370117, "learning_rate": 4.1862222222222225e-05, "loss": 0.935, "step": 3088 }, { "epoch": 24.712, "grad_norm": 32.772823333740234, "learning_rate": 4.185777777777778e-05, "loss": 1.4952, "step": 3089 }, { "epoch": 24.72, "grad_norm": 43.378726959228516, "learning_rate": 4.1853333333333335e-05, "loss": 1.3546, "step": 3090 }, { "epoch": 24.728, "grad_norm": 19.559885025024414, "learning_rate": 4.1848888888888896e-05, "loss": 1.2474, "step": 3091 }, { "epoch": 24.736, "grad_norm": 75.92479705810547, "learning_rate": 4.1844444444444444e-05, "loss": 1.637, "step": 3092 }, { "epoch": 24.744, "grad_norm": 78.02727508544922, "learning_rate": 4.184e-05, "loss": 1.2741, "step": 3093 }, { "epoch": 24.752, "grad_norm": 42.76063537597656, "learning_rate": 4.1835555555555554e-05, "loss": 0.8477, "step": 3094 }, { "epoch": 24.76, "grad_norm": 25.234092712402344, "learning_rate": 4.1831111111111116e-05, "loss": 1.4857, "step": 3095 }, { "epoch": 24.768, "grad_norm": 112.03260040283203, "learning_rate": 4.182666666666667e-05, "loss": 1.6803, "step": 3096 }, { "epoch": 24.776, "grad_norm": 24.668216705322266, "learning_rate": 4.1822222222222225e-05, "loss": 0.8328, "step": 3097 }, { "epoch": 24.784, "grad_norm": 86.57978820800781, "learning_rate": 4.181777777777778e-05, "loss": 
1.2833, "step": 3098 }, { "epoch": 24.792, "grad_norm": 22.63311004638672, "learning_rate": 4.1813333333333335e-05, "loss": 2.0874, "step": 3099 }, { "epoch": 24.8, "grad_norm": 24.184839248657227, "learning_rate": 4.180888888888889e-05, "loss": 1.3526, "step": 3100 }, { "epoch": 24.808, "grad_norm": 45.066287994384766, "learning_rate": 4.1804444444444445e-05, "loss": 1.1587, "step": 3101 }, { "epoch": 24.816, "grad_norm": 31.688701629638672, "learning_rate": 4.18e-05, "loss": 1.309, "step": 3102 }, { "epoch": 24.824, "grad_norm": 24.794775009155273, "learning_rate": 4.179555555555556e-05, "loss": 1.547, "step": 3103 }, { "epoch": 24.832, "grad_norm": 17.590600967407227, "learning_rate": 4.1791111111111116e-05, "loss": 1.2017, "step": 3104 }, { "epoch": 24.84, "grad_norm": 39.34941482543945, "learning_rate": 4.178666666666667e-05, "loss": 1.2897, "step": 3105 }, { "epoch": 24.848, "grad_norm": 51.36238098144531, "learning_rate": 4.1782222222222226e-05, "loss": 1.0957, "step": 3106 }, { "epoch": 24.856, "grad_norm": 28.733850479125977, "learning_rate": 4.177777777777778e-05, "loss": 1.1256, "step": 3107 }, { "epoch": 24.864, "grad_norm": 29.86192512512207, "learning_rate": 4.1773333333333335e-05, "loss": 1.299, "step": 3108 }, { "epoch": 24.872, "grad_norm": 25.82880401611328, "learning_rate": 4.176888888888889e-05, "loss": 1.5821, "step": 3109 }, { "epoch": 24.88, "grad_norm": 103.90090942382812, "learning_rate": 4.1764444444444445e-05, "loss": 1.8813, "step": 3110 }, { "epoch": 24.888, "grad_norm": 67.81890106201172, "learning_rate": 4.176000000000001e-05, "loss": 1.6849, "step": 3111 }, { "epoch": 24.896, "grad_norm": 37.83694076538086, "learning_rate": 4.175555555555556e-05, "loss": 1.2283, "step": 3112 }, { "epoch": 24.904, "grad_norm": 20.2652645111084, "learning_rate": 4.1751111111111116e-05, "loss": 1.2658, "step": 3113 }, { "epoch": 24.912, "grad_norm": 31.6890926361084, "learning_rate": 4.1746666666666665e-05, "loss": 1.4611, "step": 3114 }, { "epoch": 
24.92, "grad_norm": 27.29201316833496, "learning_rate": 4.174222222222222e-05, "loss": 1.6742, "step": 3115 }, { "epoch": 24.928, "grad_norm": 22.826030731201172, "learning_rate": 4.173777777777778e-05, "loss": 1.1218, "step": 3116 }, { "epoch": 24.936, "grad_norm": 18.15103530883789, "learning_rate": 4.1733333333333336e-05, "loss": 1.1293, "step": 3117 }, { "epoch": 24.944, "grad_norm": 33.194278717041016, "learning_rate": 4.172888888888889e-05, "loss": 1.329, "step": 3118 }, { "epoch": 24.951999999999998, "grad_norm": 50.65646743774414, "learning_rate": 4.1724444444444446e-05, "loss": 1.1576, "step": 3119 }, { "epoch": 24.96, "grad_norm": 28.194910049438477, "learning_rate": 4.172e-05, "loss": 1.004, "step": 3120 }, { "epoch": 24.968, "grad_norm": 25.29680061340332, "learning_rate": 4.1715555555555555e-05, "loss": 1.1696, "step": 3121 }, { "epoch": 24.976, "grad_norm": 19.57758331298828, "learning_rate": 4.171111111111111e-05, "loss": 1.1953, "step": 3122 }, { "epoch": 24.984, "grad_norm": 81.92945098876953, "learning_rate": 4.1706666666666665e-05, "loss": 1.0927, "step": 3123 }, { "epoch": 24.992, "grad_norm": 137.18228149414062, "learning_rate": 4.1702222222222227e-05, "loss": 1.0322, "step": 3124 }, { "epoch": 25.0, "grad_norm": 31.417327880859375, "learning_rate": 4.169777777777778e-05, "loss": 3.1287, "step": 3125 }, { "epoch": 25.0, "eval_loss": 1.4986623525619507, "eval_map": 0.2669, "eval_map_50": 0.5217, "eval_map_75": 0.2446, "eval_map_Coverall": 0.5013, "eval_map_Face_Shield": 0.316, "eval_map_Gloves": 0.1962, "eval_map_Goggles": 0.0473, "eval_map_Mask": 0.2735, "eval_map_large": 0.4563, "eval_map_medium": 0.1762, "eval_map_small": 0.1541, "eval_mar_1": 0.2596, "eval_mar_10": 0.4589, "eval_mar_100": 0.5046, "eval_mar_100_Coverall": 0.66, "eval_mar_100_Face_Shield": 0.6941, "eval_mar_100_Gloves": 0.3475, "eval_mar_100_Goggles": 0.4156, "eval_mar_100_Mask": 0.4058, "eval_mar_large": 0.7328, "eval_mar_medium": 0.3877, "eval_mar_small": 0.2245, 
"eval_runtime": 0.9138, "eval_samples_per_second": 31.735, "eval_steps_per_second": 2.189, "step": 3125 }, { "epoch": 25.008, "grad_norm": 143.00709533691406, "learning_rate": 4.1693333333333336e-05, "loss": 1.1721, "step": 3126 }, { "epoch": 25.016, "grad_norm": 52.2870979309082, "learning_rate": 4.168888888888889e-05, "loss": 1.2169, "step": 3127 }, { "epoch": 25.024, "grad_norm": 23.35626983642578, "learning_rate": 4.1684444444444446e-05, "loss": 1.2802, "step": 3128 }, { "epoch": 25.032, "grad_norm": 28.68605613708496, "learning_rate": 4.168e-05, "loss": 1.269, "step": 3129 }, { "epoch": 25.04, "grad_norm": 39.98277282714844, "learning_rate": 4.1675555555555556e-05, "loss": 1.3336, "step": 3130 }, { "epoch": 25.048, "grad_norm": 48.82337188720703, "learning_rate": 4.167111111111111e-05, "loss": 1.5914, "step": 3131 }, { "epoch": 25.056, "grad_norm": 32.65192794799805, "learning_rate": 4.166666666666667e-05, "loss": 1.2695, "step": 3132 }, { "epoch": 25.064, "grad_norm": 34.48800277709961, "learning_rate": 4.166222222222223e-05, "loss": 1.5407, "step": 3133 }, { "epoch": 25.072, "grad_norm": 68.66963958740234, "learning_rate": 4.165777777777778e-05, "loss": 1.8667, "step": 3134 }, { "epoch": 25.08, "grad_norm": 60.748809814453125, "learning_rate": 4.165333333333333e-05, "loss": 1.2529, "step": 3135 }, { "epoch": 25.088, "grad_norm": 28.073225021362305, "learning_rate": 4.164888888888889e-05, "loss": 1.4183, "step": 3136 }, { "epoch": 25.096, "grad_norm": 35.17037582397461, "learning_rate": 4.1644444444444446e-05, "loss": 1.2172, "step": 3137 }, { "epoch": 25.104, "grad_norm": 24.275758743286133, "learning_rate": 4.164e-05, "loss": 1.2015, "step": 3138 }, { "epoch": 25.112, "grad_norm": 21.62233543395996, "learning_rate": 4.1635555555555556e-05, "loss": 1.3256, "step": 3139 }, { "epoch": 25.12, "grad_norm": 33.59101867675781, "learning_rate": 4.163111111111112e-05, "loss": 1.2117, "step": 3140 }, { "epoch": 25.128, "grad_norm": 27.53874397277832, "learning_rate": 
4.162666666666667e-05, "loss": 1.46, "step": 3141 }, { "epoch": 25.136, "grad_norm": 20.60738754272461, "learning_rate": 4.162222222222222e-05, "loss": 1.0608, "step": 3142 }, { "epoch": 25.144, "grad_norm": 30.507654190063477, "learning_rate": 4.1617777777777775e-05, "loss": 1.1198, "step": 3143 }, { "epoch": 25.152, "grad_norm": 14.29859733581543, "learning_rate": 4.161333333333334e-05, "loss": 1.2372, "step": 3144 }, { "epoch": 25.16, "grad_norm": 25.194705963134766, "learning_rate": 4.160888888888889e-05, "loss": 3.2903, "step": 3145 }, { "epoch": 25.168, "grad_norm": 46.82529830932617, "learning_rate": 4.160444444444445e-05, "loss": 1.3487, "step": 3146 }, { "epoch": 25.176, "grad_norm": 29.44279670715332, "learning_rate": 4.16e-05, "loss": 1.2036, "step": 3147 }, { "epoch": 25.184, "grad_norm": 44.094947814941406, "learning_rate": 4.159555555555556e-05, "loss": 1.2087, "step": 3148 }, { "epoch": 25.192, "grad_norm": 54.75026321411133, "learning_rate": 4.159111111111111e-05, "loss": 1.2132, "step": 3149 }, { "epoch": 25.2, "grad_norm": 59.01725769042969, "learning_rate": 4.1586666666666666e-05, "loss": 1.416, "step": 3150 }, { "epoch": 25.208, "grad_norm": 49.59651184082031, "learning_rate": 4.158222222222222e-05, "loss": 1.4721, "step": 3151 }, { "epoch": 25.216, "grad_norm": 26.910770416259766, "learning_rate": 4.157777777777778e-05, "loss": 1.3988, "step": 3152 }, { "epoch": 25.224, "grad_norm": 14.434588432312012, "learning_rate": 4.157333333333334e-05, "loss": 1.0264, "step": 3153 }, { "epoch": 25.232, "grad_norm": 20.618162155151367, "learning_rate": 4.156888888888889e-05, "loss": 1.0231, "step": 3154 }, { "epoch": 25.24, "grad_norm": 28.695444107055664, "learning_rate": 4.156444444444445e-05, "loss": 1.0759, "step": 3155 }, { "epoch": 25.248, "grad_norm": 25.660457611083984, "learning_rate": 4.156e-05, "loss": 1.2386, "step": 3156 }, { "epoch": 25.256, "grad_norm": 28.436758041381836, "learning_rate": 4.155555555555556e-05, "loss": 1.2742, "step": 3157 
}, { "epoch": 25.264, "grad_norm": 27.272302627563477, "learning_rate": 4.155111111111111e-05, "loss": 1.1708, "step": 3158 }, { "epoch": 25.272, "grad_norm": 43.563472747802734, "learning_rate": 4.1546666666666666e-05, "loss": 1.7058, "step": 3159 }, { "epoch": 25.28, "grad_norm": 29.5281982421875, "learning_rate": 4.154222222222223e-05, "loss": 1.2985, "step": 3160 }, { "epoch": 25.288, "grad_norm": 25.883140563964844, "learning_rate": 4.153777777777778e-05, "loss": 1.1415, "step": 3161 }, { "epoch": 25.296, "grad_norm": 30.406818389892578, "learning_rate": 4.153333333333334e-05, "loss": 1.3711, "step": 3162 }, { "epoch": 25.304, "grad_norm": 40.99949645996094, "learning_rate": 4.152888888888889e-05, "loss": 1.8995, "step": 3163 }, { "epoch": 25.312, "grad_norm": 46.38948440551758, "learning_rate": 4.152444444444445e-05, "loss": 1.2846, "step": 3164 }, { "epoch": 25.32, "grad_norm": 421.7497863769531, "learning_rate": 4.152e-05, "loss": 1.8299, "step": 3165 }, { "epoch": 25.328, "grad_norm": 27.13262939453125, "learning_rate": 4.151555555555556e-05, "loss": 1.2658, "step": 3166 }, { "epoch": 25.336, "grad_norm": 37.1972541809082, "learning_rate": 4.151111111111111e-05, "loss": 1.4575, "step": 3167 }, { "epoch": 25.344, "grad_norm": 14.955245018005371, "learning_rate": 4.150666666666667e-05, "loss": 1.159, "step": 3168 }, { "epoch": 25.352, "grad_norm": 25.14494514465332, "learning_rate": 4.150222222222223e-05, "loss": 1.1526, "step": 3169 }, { "epoch": 25.36, "grad_norm": 20.585418701171875, "learning_rate": 4.1497777777777776e-05, "loss": 1.2073, "step": 3170 }, { "epoch": 25.368, "grad_norm": 74.25374603271484, "learning_rate": 4.149333333333333e-05, "loss": 1.0769, "step": 3171 }, { "epoch": 25.376, "grad_norm": 22.626951217651367, "learning_rate": 4.1488888888888886e-05, "loss": 1.0078, "step": 3172 }, { "epoch": 25.384, "grad_norm": 38.23515701293945, "learning_rate": 4.148444444444445e-05, "loss": 1.1872, "step": 3173 }, { "epoch": 25.392, "grad_norm": 
54.93226623535156, "learning_rate": 4.148e-05, "loss": 2.089, "step": 3174 }, { "epoch": 25.4, "grad_norm": 107.62715911865234, "learning_rate": 4.147555555555556e-05, "loss": 1.5711, "step": 3175 }, { "epoch": 25.408, "grad_norm": 50.52748489379883, "learning_rate": 4.147111111111111e-05, "loss": 2.2615, "step": 3176 }, { "epoch": 25.416, "grad_norm": 34.3516960144043, "learning_rate": 4.146666666666667e-05, "loss": 1.0983, "step": 3177 }, { "epoch": 25.424, "grad_norm": 16.97142219543457, "learning_rate": 4.146222222222222e-05, "loss": 1.0812, "step": 3178 }, { "epoch": 25.432, "grad_norm": 18.980072021484375, "learning_rate": 4.145777777777778e-05, "loss": 1.3316, "step": 3179 }, { "epoch": 25.44, "grad_norm": 18.20756721496582, "learning_rate": 4.145333333333333e-05, "loss": 1.1556, "step": 3180 }, { "epoch": 25.448, "grad_norm": 21.685522079467773, "learning_rate": 4.144888888888889e-05, "loss": 1.119, "step": 3181 }, { "epoch": 25.456, "grad_norm": 49.442283630371094, "learning_rate": 4.144444444444445e-05, "loss": 1.4645, "step": 3182 }, { "epoch": 25.464, "grad_norm": 23.635406494140625, "learning_rate": 4.144e-05, "loss": 1.2651, "step": 3183 }, { "epoch": 25.472, "grad_norm": 122.87861633300781, "learning_rate": 4.143555555555556e-05, "loss": 1.0431, "step": 3184 }, { "epoch": 25.48, "grad_norm": 39.62846755981445, "learning_rate": 4.143111111111111e-05, "loss": 1.1593, "step": 3185 }, { "epoch": 25.488, "grad_norm": 29.564821243286133, "learning_rate": 4.142666666666667e-05, "loss": 0.9827, "step": 3186 }, { "epoch": 25.496, "grad_norm": 46.280914306640625, "learning_rate": 4.142222222222222e-05, "loss": 1.1193, "step": 3187 }, { "epoch": 25.504, "grad_norm": 28.912696838378906, "learning_rate": 4.141777777777778e-05, "loss": 1.1419, "step": 3188 }, { "epoch": 25.512, "grad_norm": 29.84450912475586, "learning_rate": 4.141333333333334e-05, "loss": 1.1501, "step": 3189 }, { "epoch": 25.52, "grad_norm": 160.38597106933594, "learning_rate": 
4.1408888888888894e-05, "loss": 1.2477, "step": 3190 }, { "epoch": 25.528, "grad_norm": 31.61499786376953, "learning_rate": 4.140444444444445e-05, "loss": 1.5018, "step": 3191 }, { "epoch": 25.536, "grad_norm": 19.70735740661621, "learning_rate": 4.14e-05, "loss": 1.2065, "step": 3192 }, { "epoch": 25.544, "grad_norm": 123.26237487792969, "learning_rate": 4.139555555555556e-05, "loss": 1.9996, "step": 3193 }, { "epoch": 25.552, "grad_norm": 30.000160217285156, "learning_rate": 4.139111111111111e-05, "loss": 1.2618, "step": 3194 }, { "epoch": 25.56, "grad_norm": 20.974773406982422, "learning_rate": 4.138666666666667e-05, "loss": 1.3114, "step": 3195 }, { "epoch": 25.568, "grad_norm": 51.306148529052734, "learning_rate": 4.138222222222222e-05, "loss": 1.3001, "step": 3196 }, { "epoch": 25.576, "grad_norm": 19.961458206176758, "learning_rate": 4.1377777777777784e-05, "loss": 1.4367, "step": 3197 }, { "epoch": 25.584, "grad_norm": 35.98255920410156, "learning_rate": 4.137333333333334e-05, "loss": 1.8061, "step": 3198 }, { "epoch": 25.592, "grad_norm": 27.810739517211914, "learning_rate": 4.136888888888889e-05, "loss": 1.6871, "step": 3199 }, { "epoch": 25.6, "grad_norm": 23.595966339111328, "learning_rate": 4.136444444444444e-05, "loss": 1.1546, "step": 3200 }, { "epoch": 25.608, "grad_norm": 25.149866104125977, "learning_rate": 4.1360000000000004e-05, "loss": 1.2726, "step": 3201 }, { "epoch": 25.616, "grad_norm": 29.80702018737793, "learning_rate": 4.135555555555556e-05, "loss": 1.1917, "step": 3202 }, { "epoch": 25.624, "grad_norm": 38.65845489501953, "learning_rate": 4.1351111111111113e-05, "loss": 1.5438, "step": 3203 }, { "epoch": 25.632, "grad_norm": 22.97319221496582, "learning_rate": 4.134666666666667e-05, "loss": 1.3275, "step": 3204 }, { "epoch": 25.64, "grad_norm": 26.973560333251953, "learning_rate": 4.134222222222223e-05, "loss": 1.5191, "step": 3205 }, { "epoch": 25.648, "grad_norm": 65.18722534179688, "learning_rate": 4.133777777777778e-05, "loss": 
0.9592, "step": 3206 }, { "epoch": 25.656, "grad_norm": 16.010318756103516, "learning_rate": 4.133333333333333e-05, "loss": 1.1712, "step": 3207 }, { "epoch": 25.664, "grad_norm": 16.049270629882812, "learning_rate": 4.132888888888889e-05, "loss": 1.0, "step": 3208 }, { "epoch": 25.672, "grad_norm": 43.20909881591797, "learning_rate": 4.132444444444445e-05, "loss": 1.1841, "step": 3209 }, { "epoch": 25.68, "grad_norm": 38.94294357299805, "learning_rate": 4.1320000000000004e-05, "loss": 1.2149, "step": 3210 }, { "epoch": 25.688, "grad_norm": 24.844058990478516, "learning_rate": 4.131555555555556e-05, "loss": 1.7848, "step": 3211 }, { "epoch": 25.696, "grad_norm": 30.195159912109375, "learning_rate": 4.1311111111111114e-05, "loss": 1.5636, "step": 3212 }, { "epoch": 25.704, "grad_norm": 30.27652359008789, "learning_rate": 4.130666666666667e-05, "loss": 1.7214, "step": 3213 }, { "epoch": 25.712, "grad_norm": 22.763280868530273, "learning_rate": 4.1302222222222224e-05, "loss": 1.5691, "step": 3214 }, { "epoch": 25.72, "grad_norm": 22.89240264892578, "learning_rate": 4.129777777777778e-05, "loss": 0.984, "step": 3215 }, { "epoch": 25.728, "grad_norm": 21.080202102661133, "learning_rate": 4.129333333333333e-05, "loss": 1.6192, "step": 3216 }, { "epoch": 25.736, "grad_norm": 19.150362014770508, "learning_rate": 4.1288888888888895e-05, "loss": 1.2452, "step": 3217 }, { "epoch": 25.744, "grad_norm": 32.51883316040039, "learning_rate": 4.128444444444445e-05, "loss": 1.4097, "step": 3218 }, { "epoch": 25.752, "grad_norm": 26.398061752319336, "learning_rate": 4.1280000000000005e-05, "loss": 1.327, "step": 3219 }, { "epoch": 25.76, "grad_norm": 17.00984764099121, "learning_rate": 4.127555555555556e-05, "loss": 0.9511, "step": 3220 }, { "epoch": 25.768, "grad_norm": 32.71712112426758, "learning_rate": 4.127111111111111e-05, "loss": 1.4929, "step": 3221 }, { "epoch": 25.776, "grad_norm": 58.18383026123047, "learning_rate": 4.126666666666667e-05, "loss": 1.1576, "step": 3222 }, { 
"epoch": 25.784, "grad_norm": 21.368892669677734, "learning_rate": 4.1262222222222224e-05, "loss": 1.3664, "step": 3223 }, { "epoch": 25.792, "grad_norm": 41.43092727661133, "learning_rate": 4.125777777777778e-05, "loss": 1.1186, "step": 3224 }, { "epoch": 25.8, "grad_norm": 21.931581497192383, "learning_rate": 4.1253333333333334e-05, "loss": 1.4136, "step": 3225 }, { "epoch": 25.808, "grad_norm": 19.94620132446289, "learning_rate": 4.1248888888888895e-05, "loss": 1.258, "step": 3226 }, { "epoch": 25.816, "grad_norm": 85.91780090332031, "learning_rate": 4.124444444444444e-05, "loss": 1.321, "step": 3227 }, { "epoch": 25.824, "grad_norm": 54.401004791259766, "learning_rate": 4.124e-05, "loss": 1.3458, "step": 3228 }, { "epoch": 25.832, "grad_norm": 17.798437118530273, "learning_rate": 4.123555555555555e-05, "loss": 1.3794, "step": 3229 }, { "epoch": 25.84, "grad_norm": 89.63033294677734, "learning_rate": 4.1231111111111115e-05, "loss": 1.0677, "step": 3230 }, { "epoch": 25.848, "grad_norm": 70.44574737548828, "learning_rate": 4.122666666666667e-05, "loss": 1.2691, "step": 3231 }, { "epoch": 25.856, "grad_norm": 32.996498107910156, "learning_rate": 4.1222222222222224e-05, "loss": 0.9765, "step": 3232 }, { "epoch": 25.864, "grad_norm": 38.14982223510742, "learning_rate": 4.121777777777778e-05, "loss": 2.0172, "step": 3233 }, { "epoch": 25.872, "grad_norm": 27.37763786315918, "learning_rate": 4.1213333333333334e-05, "loss": 1.0763, "step": 3234 }, { "epoch": 25.88, "grad_norm": 25.627887725830078, "learning_rate": 4.120888888888889e-05, "loss": 1.2335, "step": 3235 }, { "epoch": 25.888, "grad_norm": 26.983070373535156, "learning_rate": 4.1204444444444444e-05, "loss": 1.384, "step": 3236 }, { "epoch": 25.896, "grad_norm": 39.55673599243164, "learning_rate": 4.12e-05, "loss": 1.6484, "step": 3237 }, { "epoch": 25.904, "grad_norm": 26.67171287536621, "learning_rate": 4.119555555555556e-05, "loss": 1.139, "step": 3238 }, { "epoch": 25.912, "grad_norm": 27.629981994628906, 
"learning_rate": 4.1191111111111115e-05, "loss": 1.1256, "step": 3239 }, { "epoch": 25.92, "grad_norm": 23.837013244628906, "learning_rate": 4.118666666666667e-05, "loss": 1.3469, "step": 3240 }, { "epoch": 25.928, "grad_norm": 71.9830551147461, "learning_rate": 4.1182222222222225e-05, "loss": 1.1658, "step": 3241 }, { "epoch": 25.936, "grad_norm": 18.88456916809082, "learning_rate": 4.117777777777778e-05, "loss": 1.3916, "step": 3242 }, { "epoch": 25.944, "grad_norm": 35.99708938598633, "learning_rate": 4.1173333333333334e-05, "loss": 1.2216, "step": 3243 }, { "epoch": 25.951999999999998, "grad_norm": 30.976333618164062, "learning_rate": 4.116888888888889e-05, "loss": 1.0543, "step": 3244 }, { "epoch": 25.96, "grad_norm": 40.60271072387695, "learning_rate": 4.1164444444444444e-05, "loss": 1.0881, "step": 3245 }, { "epoch": 25.968, "grad_norm": 35.11263656616211, "learning_rate": 4.1160000000000006e-05, "loss": 1.5962, "step": 3246 }, { "epoch": 25.976, "grad_norm": 21.040937423706055, "learning_rate": 4.115555555555556e-05, "loss": 1.2003, "step": 3247 }, { "epoch": 25.984, "grad_norm": 70.47148895263672, "learning_rate": 4.1151111111111115e-05, "loss": 2.0889, "step": 3248 }, { "epoch": 25.992, "grad_norm": 36.02577209472656, "learning_rate": 4.1146666666666663e-05, "loss": 0.9399, "step": 3249 }, { "epoch": 26.0, "grad_norm": 34.0584831237793, "learning_rate": 4.1142222222222225e-05, "loss": 1.1972, "step": 3250 }, { "epoch": 26.0, "eval_loss": 1.2527918815612793, "eval_map": 0.2883, "eval_map_50": 0.5894, "eval_map_75": 0.2637, "eval_map_Coverall": 0.5143, "eval_map_Face_Shield": 0.2537, "eval_map_Gloves": 0.2643, "eval_map_Goggles": 0.1264, "eval_map_Mask": 0.2828, "eval_map_large": 0.4459, "eval_map_medium": 0.1825, "eval_map_small": 0.1509, "eval_mar_1": 0.2644, "eval_mar_10": 0.4831, "eval_mar_100": 0.4986, "eval_mar_100_Coverall": 0.7133, "eval_mar_100_Face_Shield": 0.5647, "eval_mar_100_Gloves": 0.423, "eval_mar_100_Goggles": 0.4094, "eval_mar_100_Mask": 
0.3827, "eval_mar_large": 0.6616, "eval_mar_medium": 0.3734, "eval_mar_small": 0.216, "eval_runtime": 0.9136, "eval_samples_per_second": 31.742, "eval_steps_per_second": 2.189, "step": 3250 }, { "epoch": 26.008, "grad_norm": 206.1854705810547, "learning_rate": 4.113777777777778e-05, "loss": 1.0712, "step": 3251 }, { "epoch": 26.016, "grad_norm": 26.05936050415039, "learning_rate": 4.1133333333333335e-05, "loss": 1.2025, "step": 3252 }, { "epoch": 26.024, "grad_norm": 32.768455505371094, "learning_rate": 4.112888888888889e-05, "loss": 1.3129, "step": 3253 }, { "epoch": 26.032, "grad_norm": 37.744163513183594, "learning_rate": 4.112444444444445e-05, "loss": 0.9127, "step": 3254 }, { "epoch": 26.04, "grad_norm": 37.916160583496094, "learning_rate": 4.1120000000000006e-05, "loss": 0.9313, "step": 3255 }, { "epoch": 26.048, "grad_norm": 27.860584259033203, "learning_rate": 4.1115555555555554e-05, "loss": 1.3295, "step": 3256 }, { "epoch": 26.056, "grad_norm": 32.864540100097656, "learning_rate": 4.111111111111111e-05, "loss": 1.1125, "step": 3257 }, { "epoch": 26.064, "grad_norm": 55.32715606689453, "learning_rate": 4.110666666666667e-05, "loss": 1.8213, "step": 3258 }, { "epoch": 26.072, "grad_norm": 16.22880744934082, "learning_rate": 4.1102222222222225e-05, "loss": 1.1202, "step": 3259 }, { "epoch": 26.08, "grad_norm": 56.75408935546875, "learning_rate": 4.109777777777778e-05, "loss": 1.0708, "step": 3260 }, { "epoch": 26.088, "grad_norm": 39.368804931640625, "learning_rate": 4.1093333333333335e-05, "loss": 1.1644, "step": 3261 }, { "epoch": 26.096, "grad_norm": 67.55475616455078, "learning_rate": 4.10888888888889e-05, "loss": 1.3618, "step": 3262 }, { "epoch": 26.104, "grad_norm": 24.75886344909668, "learning_rate": 4.1084444444444445e-05, "loss": 1.4444, "step": 3263 }, { "epoch": 26.112, "grad_norm": 31.992822647094727, "learning_rate": 4.108e-05, "loss": 1.0918, "step": 3264 }, { "epoch": 26.12, "grad_norm": 23.36009979248047, "learning_rate": 
4.1075555555555555e-05, "loss": 1.143, "step": 3265 }, { "epoch": 26.128, "grad_norm": 48.52139663696289, "learning_rate": 4.1071111111111116e-05, "loss": 1.3214, "step": 3266 }, { "epoch": 26.136, "grad_norm": 20.77483367919922, "learning_rate": 4.106666666666667e-05, "loss": 1.4957, "step": 3267 }, { "epoch": 26.144, "grad_norm": 70.2359619140625, "learning_rate": 4.1062222222222226e-05, "loss": 1.1143, "step": 3268 }, { "epoch": 26.152, "grad_norm": 36.56694030761719, "learning_rate": 4.105777777777778e-05, "loss": 1.4945, "step": 3269 }, { "epoch": 26.16, "grad_norm": 34.26956558227539, "learning_rate": 4.1053333333333336e-05, "loss": 3.6299, "step": 3270 }, { "epoch": 26.168, "grad_norm": 27.559356689453125, "learning_rate": 4.104888888888889e-05, "loss": 1.3273, "step": 3271 }, { "epoch": 26.176, "grad_norm": 25.92137908935547, "learning_rate": 4.1044444444444445e-05, "loss": 2.2864, "step": 3272 }, { "epoch": 26.184, "grad_norm": 247.07791137695312, "learning_rate": 4.104e-05, "loss": 1.4488, "step": 3273 }, { "epoch": 26.192, "grad_norm": 46.7773551940918, "learning_rate": 4.1035555555555555e-05, "loss": 2.7687, "step": 3274 }, { "epoch": 26.2, "grad_norm": 27.395278930664062, "learning_rate": 4.1031111111111117e-05, "loss": 0.8908, "step": 3275 }, { "epoch": 26.208, "grad_norm": 33.6649055480957, "learning_rate": 4.102666666666667e-05, "loss": 1.5085, "step": 3276 }, { "epoch": 26.216, "grad_norm": 32.074371337890625, "learning_rate": 4.1022222222222226e-05, "loss": 0.9862, "step": 3277 }, { "epoch": 26.224, "grad_norm": 21.85225486755371, "learning_rate": 4.1017777777777774e-05, "loss": 1.1743, "step": 3278 }, { "epoch": 26.232, "grad_norm": 34.78070831298828, "learning_rate": 4.1013333333333336e-05, "loss": 1.3667, "step": 3279 }, { "epoch": 26.24, "grad_norm": 19.267404556274414, "learning_rate": 4.100888888888889e-05, "loss": 1.3446, "step": 3280 }, { "epoch": 26.248, "grad_norm": 22.435073852539062, "learning_rate": 4.1004444444444446e-05, "loss": 
1.0208, "step": 3281 }, { "epoch": 26.256, "grad_norm": 40.786041259765625, "learning_rate": 4.1e-05, "loss": 1.4568, "step": 3282 }, { "epoch": 26.264, "grad_norm": 70.4704360961914, "learning_rate": 4.099555555555556e-05, "loss": 1.4515, "step": 3283 }, { "epoch": 26.272, "grad_norm": 47.64590835571289, "learning_rate": 4.099111111111111e-05, "loss": 1.3241, "step": 3284 }, { "epoch": 26.28, "grad_norm": 59.62577438354492, "learning_rate": 4.0986666666666665e-05, "loss": 1.177, "step": 3285 }, { "epoch": 26.288, "grad_norm": 20.887712478637695, "learning_rate": 4.098222222222222e-05, "loss": 1.2657, "step": 3286 }, { "epoch": 26.296, "grad_norm": 58.759334564208984, "learning_rate": 4.097777777777778e-05, "loss": 1.1229, "step": 3287 }, { "epoch": 26.304, "grad_norm": 21.283939361572266, "learning_rate": 4.0973333333333336e-05, "loss": 0.8405, "step": 3288 }, { "epoch": 26.312, "grad_norm": 26.096099853515625, "learning_rate": 4.096888888888889e-05, "loss": 1.2932, "step": 3289 }, { "epoch": 26.32, "grad_norm": 30.30364227294922, "learning_rate": 4.0964444444444446e-05, "loss": 1.2161, "step": 3290 }, { "epoch": 26.328, "grad_norm": 56.25259017944336, "learning_rate": 4.096e-05, "loss": 1.4365, "step": 3291 }, { "epoch": 26.336, "grad_norm": 22.059528350830078, "learning_rate": 4.0955555555555556e-05, "loss": 1.2287, "step": 3292 }, { "epoch": 26.344, "grad_norm": 26.885242462158203, "learning_rate": 4.095111111111111e-05, "loss": 0.9122, "step": 3293 }, { "epoch": 26.352, "grad_norm": 26.475784301757812, "learning_rate": 4.0946666666666665e-05, "loss": 1.2466, "step": 3294 }, { "epoch": 26.36, "grad_norm": 22.694881439208984, "learning_rate": 4.094222222222223e-05, "loss": 1.5297, "step": 3295 }, { "epoch": 26.368, "grad_norm": 32.679962158203125, "learning_rate": 4.093777777777778e-05, "loss": 1.6706, "step": 3296 }, { "epoch": 26.376, "grad_norm": 26.128049850463867, "learning_rate": 4.093333333333334e-05, "loss": 1.1084, "step": 3297 }, { "epoch": 26.384, 
"grad_norm": 34.05936813354492, "learning_rate": 4.092888888888889e-05, "loss": 1.2316, "step": 3298 }, { "epoch": 26.392, "grad_norm": 46.128292083740234, "learning_rate": 4.0924444444444446e-05, "loss": 1.0933, "step": 3299 }, { "epoch": 26.4, "grad_norm": 34.252227783203125, "learning_rate": 4.092e-05, "loss": 1.0965, "step": 3300 }, { "epoch": 26.408, "grad_norm": 14.212733268737793, "learning_rate": 4.0915555555555556e-05, "loss": 0.9324, "step": 3301 }, { "epoch": 26.416, "grad_norm": 75.76534271240234, "learning_rate": 4.091111111111111e-05, "loss": 1.855, "step": 3302 }, { "epoch": 26.424, "grad_norm": 44.83843231201172, "learning_rate": 4.090666666666667e-05, "loss": 1.0692, "step": 3303 }, { "epoch": 26.432, "grad_norm": 24.375564575195312, "learning_rate": 4.090222222222223e-05, "loss": 1.5705, "step": 3304 }, { "epoch": 26.44, "grad_norm": 25.87251091003418, "learning_rate": 4.089777777777778e-05, "loss": 1.2314, "step": 3305 }, { "epoch": 26.448, "grad_norm": 31.544727325439453, "learning_rate": 4.089333333333333e-05, "loss": 0.9248, "step": 3306 }, { "epoch": 26.456, "grad_norm": 30.443035125732422, "learning_rate": 4.088888888888889e-05, "loss": 1.3595, "step": 3307 }, { "epoch": 26.464, "grad_norm": 45.07841873168945, "learning_rate": 4.088444444444445e-05, "loss": 1.4266, "step": 3308 }, { "epoch": 26.472, "grad_norm": 175.04876708984375, "learning_rate": 4.088e-05, "loss": 1.5317, "step": 3309 }, { "epoch": 26.48, "grad_norm": 23.552297592163086, "learning_rate": 4.0875555555555556e-05, "loss": 1.6831, "step": 3310 }, { "epoch": 26.488, "grad_norm": 70.59005737304688, "learning_rate": 4.087111111111112e-05, "loss": 1.9939, "step": 3311 }, { "epoch": 26.496, "grad_norm": 33.278770446777344, "learning_rate": 4.086666666666667e-05, "loss": 1.6933, "step": 3312 }, { "epoch": 26.504, "grad_norm": 27.75226593017578, "learning_rate": 4.086222222222222e-05, "loss": 1.0936, "step": 3313 }, { "epoch": 26.512, "grad_norm": 24.7832088470459, "learning_rate": 
4.0857777777777776e-05, "loss": 1.0365, "step": 3314 }, { "epoch": 26.52, "grad_norm": 30.07548713684082, "learning_rate": 4.085333333333334e-05, "loss": 0.8205, "step": 3315 }, { "epoch": 26.528, "grad_norm": 15.362305641174316, "learning_rate": 4.084888888888889e-05, "loss": 1.3437, "step": 3316 }, { "epoch": 26.536, "grad_norm": 30.32872200012207, "learning_rate": 4.084444444444445e-05, "loss": 1.3486, "step": 3317 }, { "epoch": 26.544, "grad_norm": 19.273059844970703, "learning_rate": 4.084e-05, "loss": 1.3353, "step": 3318 }, { "epoch": 26.552, "grad_norm": 29.49330711364746, "learning_rate": 4.083555555555556e-05, "loss": 1.4065, "step": 3319 }, { "epoch": 26.56, "grad_norm": 26.015201568603516, "learning_rate": 4.083111111111111e-05, "loss": 1.0793, "step": 3320 }, { "epoch": 26.568, "grad_norm": 68.69666290283203, "learning_rate": 4.0826666666666667e-05, "loss": 1.654, "step": 3321 }, { "epoch": 26.576, "grad_norm": 34.202693939208984, "learning_rate": 4.082222222222222e-05, "loss": 1.3412, "step": 3322 }, { "epoch": 26.584, "grad_norm": 26.270822525024414, "learning_rate": 4.0817777777777776e-05, "loss": 1.1329, "step": 3323 }, { "epoch": 26.592, "grad_norm": 47.47687530517578, "learning_rate": 4.081333333333334e-05, "loss": 2.0025, "step": 3324 }, { "epoch": 26.6, "grad_norm": 24.861736297607422, "learning_rate": 4.080888888888889e-05, "loss": 1.5538, "step": 3325 }, { "epoch": 26.608, "grad_norm": 96.12983703613281, "learning_rate": 4.080444444444445e-05, "loss": 1.609, "step": 3326 }, { "epoch": 26.616, "grad_norm": 44.85895919799805, "learning_rate": 4.08e-05, "loss": 1.4664, "step": 3327 }, { "epoch": 26.624, "grad_norm": 21.385730743408203, "learning_rate": 4.079555555555556e-05, "loss": 1.1227, "step": 3328 }, { "epoch": 26.632, "grad_norm": 37.061405181884766, "learning_rate": 4.079111111111111e-05, "loss": 1.6549, "step": 3329 }, { "epoch": 26.64, "grad_norm": 23.896644592285156, "learning_rate": 4.078666666666667e-05, "loss": 1.5347, "step": 3330 
}, { "epoch": 26.648, "grad_norm": 39.8436164855957, "learning_rate": 4.078222222222222e-05, "loss": 1.4287, "step": 3331 }, { "epoch": 26.656, "grad_norm": 29.845064163208008, "learning_rate": 4.0777777777777783e-05, "loss": 1.5511, "step": 3332 }, { "epoch": 26.664, "grad_norm": 25.918853759765625, "learning_rate": 4.077333333333334e-05, "loss": 1.4549, "step": 3333 }, { "epoch": 26.672, "grad_norm": 20.975086212158203, "learning_rate": 4.076888888888889e-05, "loss": 1.1318, "step": 3334 }, { "epoch": 26.68, "grad_norm": 37.584110260009766, "learning_rate": 4.076444444444444e-05, "loss": 1.3757, "step": 3335 }, { "epoch": 26.688, "grad_norm": 26.832040786743164, "learning_rate": 4.076e-05, "loss": 1.5924, "step": 3336 }, { "epoch": 26.696, "grad_norm": 17.198429107666016, "learning_rate": 4.075555555555556e-05, "loss": 1.0474, "step": 3337 }, { "epoch": 26.704, "grad_norm": 27.23823356628418, "learning_rate": 4.075111111111111e-05, "loss": 1.3474, "step": 3338 }, { "epoch": 26.712, "grad_norm": 20.71897315979004, "learning_rate": 4.074666666666667e-05, "loss": 1.0376, "step": 3339 }, { "epoch": 26.72, "grad_norm": 60.91930389404297, "learning_rate": 4.074222222222223e-05, "loss": 1.299, "step": 3340 }, { "epoch": 26.728, "grad_norm": 45.246559143066406, "learning_rate": 4.073777777777778e-05, "loss": 1.3014, "step": 3341 }, { "epoch": 26.736, "grad_norm": 22.202730178833008, "learning_rate": 4.073333333333333e-05, "loss": 1.1617, "step": 3342 }, { "epoch": 26.744, "grad_norm": 140.18983459472656, "learning_rate": 4.072888888888889e-05, "loss": 0.8931, "step": 3343 }, { "epoch": 26.752, "grad_norm": 23.53260040283203, "learning_rate": 4.072444444444445e-05, "loss": 1.3755, "step": 3344 }, { "epoch": 26.76, "grad_norm": 36.461326599121094, "learning_rate": 4.072e-05, "loss": 1.1774, "step": 3345 }, { "epoch": 26.768, "grad_norm": 70.76636505126953, "learning_rate": 4.071555555555556e-05, "loss": 1.1544, "step": 3346 }, { "epoch": 26.776, "grad_norm": 
86.5371322631836, "learning_rate": 4.071111111111111e-05, "loss": 1.5517, "step": 3347 }, { "epoch": 26.784, "grad_norm": 28.93910026550293, "learning_rate": 4.070666666666667e-05, "loss": 1.5123, "step": 3348 }, { "epoch": 26.792, "grad_norm": 19.112655639648438, "learning_rate": 4.070222222222222e-05, "loss": 1.2328, "step": 3349 }, { "epoch": 26.8, "grad_norm": 31.653217315673828, "learning_rate": 4.069777777777778e-05, "loss": 1.1262, "step": 3350 }, { "epoch": 26.808, "grad_norm": 33.33699417114258, "learning_rate": 4.069333333333333e-05, "loss": 1.4503, "step": 3351 }, { "epoch": 26.816, "grad_norm": 39.25535583496094, "learning_rate": 4.0688888888888894e-05, "loss": 1.2995, "step": 3352 }, { "epoch": 26.824, "grad_norm": 33.13154602050781, "learning_rate": 4.068444444444445e-05, "loss": 1.1492, "step": 3353 }, { "epoch": 26.832, "grad_norm": 58.84341812133789, "learning_rate": 4.0680000000000004e-05, "loss": 1.1087, "step": 3354 }, { "epoch": 26.84, "grad_norm": 26.92339324951172, "learning_rate": 4.067555555555556e-05, "loss": 1.6701, "step": 3355 }, { "epoch": 26.848, "grad_norm": 292.9190673828125, "learning_rate": 4.067111111111111e-05, "loss": 1.9833, "step": 3356 }, { "epoch": 26.856, "grad_norm": 42.97812271118164, "learning_rate": 4.066666666666667e-05, "loss": 1.1745, "step": 3357 }, { "epoch": 26.864, "grad_norm": 28.878177642822266, "learning_rate": 4.066222222222222e-05, "loss": 1.6196, "step": 3358 }, { "epoch": 26.872, "grad_norm": 41.145904541015625, "learning_rate": 4.065777777777778e-05, "loss": 1.4602, "step": 3359 }, { "epoch": 26.88, "grad_norm": 28.431941986083984, "learning_rate": 4.065333333333334e-05, "loss": 0.9835, "step": 3360 }, { "epoch": 26.888, "grad_norm": 40.33266067504883, "learning_rate": 4.0648888888888894e-05, "loss": 1.4532, "step": 3361 }, { "epoch": 26.896, "grad_norm": 36.073204040527344, "learning_rate": 4.064444444444445e-05, "loss": 1.4494, "step": 3362 }, { "epoch": 26.904, "grad_norm": 23.613853454589844, 
"learning_rate": 4.064e-05, "loss": 1.5187, "step": 3363 }, { "epoch": 26.912, "grad_norm": 48.712772369384766, "learning_rate": 4.063555555555556e-05, "loss": 1.4191, "step": 3364 }, { "epoch": 26.92, "grad_norm": 42.52948760986328, "learning_rate": 4.0631111111111114e-05, "loss": 1.0702, "step": 3365 }, { "epoch": 26.928, "grad_norm": 25.946321487426758, "learning_rate": 4.062666666666667e-05, "loss": 1.0692, "step": 3366 }, { "epoch": 26.936, "grad_norm": 32.28261184692383, "learning_rate": 4.062222222222222e-05, "loss": 1.4095, "step": 3367 }, { "epoch": 26.944, "grad_norm": 59.99574661254883, "learning_rate": 4.0617777777777785e-05, "loss": 1.1545, "step": 3368 }, { "epoch": 26.951999999999998, "grad_norm": 33.73733139038086, "learning_rate": 4.061333333333334e-05, "loss": 1.472, "step": 3369 }, { "epoch": 26.96, "grad_norm": 259.3487548828125, "learning_rate": 4.060888888888889e-05, "loss": 1.2395, "step": 3370 }, { "epoch": 26.968, "grad_norm": 86.13064575195312, "learning_rate": 4.060444444444444e-05, "loss": 1.2607, "step": 3371 }, { "epoch": 26.976, "grad_norm": 39.052452087402344, "learning_rate": 4.0600000000000004e-05, "loss": 1.4089, "step": 3372 }, { "epoch": 26.984, "grad_norm": 42.623191833496094, "learning_rate": 4.059555555555556e-05, "loss": 1.37, "step": 3373 }, { "epoch": 26.992, "grad_norm": 26.930696487426758, "learning_rate": 4.0591111111111114e-05, "loss": 1.0309, "step": 3374 }, { "epoch": 27.0, "grad_norm": 24.845291137695312, "learning_rate": 4.058666666666667e-05, "loss": 2.3148, "step": 3375 }, { "epoch": 27.0, "eval_loss": 1.3144981861114502, "eval_map": 0.2655, "eval_map_50": 0.5424, "eval_map_75": 0.2254, "eval_map_Coverall": 0.5471, "eval_map_Face_Shield": 0.2642, "eval_map_Gloves": 0.1606, "eval_map_Goggles": 0.0982, "eval_map_Mask": 0.2577, "eval_map_large": 0.4054, "eval_map_medium": 0.18, "eval_map_small": 0.0836, "eval_mar_1": 0.2661, "eval_mar_10": 0.4469, "eval_mar_100": 0.4593, "eval_mar_100_Coverall": 0.7089, 
"eval_mar_100_Face_Shield": 0.5412, "eval_mar_100_Gloves": 0.3426, "eval_mar_100_Goggles": 0.35, "eval_mar_100_Mask": 0.3538, "eval_mar_large": 0.619, "eval_mar_medium": 0.3329, "eval_mar_small": 0.1584, "eval_runtime": 0.9102, "eval_samples_per_second": 31.86, "eval_steps_per_second": 2.197, "step": 3375 }, { "epoch": 27.008, "grad_norm": 22.466346740722656, "learning_rate": 4.0582222222222224e-05, "loss": 1.6933, "step": 3376 }, { "epoch": 27.016, "grad_norm": 28.191465377807617, "learning_rate": 4.057777777777778e-05, "loss": 1.0659, "step": 3377 }, { "epoch": 27.024, "grad_norm": 47.04734802246094, "learning_rate": 4.057333333333333e-05, "loss": 1.0768, "step": 3378 }, { "epoch": 27.032, "grad_norm": 28.191322326660156, "learning_rate": 4.056888888888889e-05, "loss": 0.9253, "step": 3379 }, { "epoch": 27.04, "grad_norm": 21.566699981689453, "learning_rate": 4.056444444444444e-05, "loss": 1.2212, "step": 3380 }, { "epoch": 27.048, "grad_norm": 23.9630126953125, "learning_rate": 4.0560000000000005e-05, "loss": 0.9922, "step": 3381 }, { "epoch": 27.056, "grad_norm": 32.525455474853516, "learning_rate": 4.055555555555556e-05, "loss": 1.3458, "step": 3382 }, { "epoch": 27.064, "grad_norm": 46.35676193237305, "learning_rate": 4.0551111111111114e-05, "loss": 1.0967, "step": 3383 }, { "epoch": 27.072, "grad_norm": 23.159395217895508, "learning_rate": 4.054666666666667e-05, "loss": 1.1862, "step": 3384 }, { "epoch": 27.08, "grad_norm": 33.6084098815918, "learning_rate": 4.0542222222222224e-05, "loss": 1.3114, "step": 3385 }, { "epoch": 27.088, "grad_norm": 37.259490966796875, "learning_rate": 4.053777777777778e-05, "loss": 1.5004, "step": 3386 }, { "epoch": 27.096, "grad_norm": 25.697235107421875, "learning_rate": 4.0533333333333334e-05, "loss": 1.2271, "step": 3387 }, { "epoch": 27.104, "grad_norm": 33.19028091430664, "learning_rate": 4.052888888888889e-05, "loss": 1.3329, "step": 3388 }, { "epoch": 27.112, "grad_norm": 30.86740493774414, "learning_rate": 
4.052444444444445e-05, "loss": 1.0254, "step": 3389 }, { "epoch": 27.12, "grad_norm": 34.180763244628906, "learning_rate": 4.0520000000000005e-05, "loss": 1.2459, "step": 3390 }, { "epoch": 27.128, "grad_norm": 21.102951049804688, "learning_rate": 4.051555555555556e-05, "loss": 1.2612, "step": 3391 }, { "epoch": 27.136, "grad_norm": 71.07840728759766, "learning_rate": 4.051111111111111e-05, "loss": 1.5007, "step": 3392 }, { "epoch": 27.144, "grad_norm": 22.603994369506836, "learning_rate": 4.050666666666667e-05, "loss": 1.1561, "step": 3393 }, { "epoch": 27.152, "grad_norm": 19.58434295654297, "learning_rate": 4.0502222222222224e-05, "loss": 1.3651, "step": 3394 }, { "epoch": 27.16, "grad_norm": 66.37174224853516, "learning_rate": 4.049777777777778e-05, "loss": 1.5974, "step": 3395 }, { "epoch": 27.168, "grad_norm": 38.119178771972656, "learning_rate": 4.0493333333333334e-05, "loss": 1.5499, "step": 3396 }, { "epoch": 27.176, "grad_norm": 313.83465576171875, "learning_rate": 4.0488888888888896e-05, "loss": 1.2357, "step": 3397 }, { "epoch": 27.184, "grad_norm": 26.02490997314453, "learning_rate": 4.0484444444444444e-05, "loss": 1.3702, "step": 3398 }, { "epoch": 27.192, "grad_norm": 22.3981990814209, "learning_rate": 4.048e-05, "loss": 1.7518, "step": 3399 }, { "epoch": 27.2, "grad_norm": 29.551151275634766, "learning_rate": 4.0475555555555554e-05, "loss": 3.1483, "step": 3400 }, { "epoch": 27.208, "grad_norm": 26.285091400146484, "learning_rate": 4.0471111111111115e-05, "loss": 1.4088, "step": 3401 }, { "epoch": 27.216, "grad_norm": 73.27972412109375, "learning_rate": 4.046666666666667e-05, "loss": 1.5478, "step": 3402 }, { "epoch": 27.224, "grad_norm": 21.544414520263672, "learning_rate": 4.0462222222222225e-05, "loss": 1.3512, "step": 3403 }, { "epoch": 27.232, "grad_norm": 22.843284606933594, "learning_rate": 4.045777777777778e-05, "loss": 1.4506, "step": 3404 }, { "epoch": 27.24, "grad_norm": 32.835060119628906, "learning_rate": 4.0453333333333335e-05, "loss": 
1.546, "step": 3405 }, { "epoch": 27.248, "grad_norm": 54.12104415893555, "learning_rate": 4.044888888888889e-05, "loss": 1.6373, "step": 3406 }, { "epoch": 27.256, "grad_norm": 22.307920455932617, "learning_rate": 4.0444444444444444e-05, "loss": 1.2982, "step": 3407 }, { "epoch": 27.264, "grad_norm": 33.859622955322266, "learning_rate": 4.044e-05, "loss": 1.1684, "step": 3408 }, { "epoch": 27.272, "grad_norm": 49.938026428222656, "learning_rate": 4.043555555555556e-05, "loss": 1.2025, "step": 3409 }, { "epoch": 27.28, "grad_norm": 31.83547592163086, "learning_rate": 4.0431111111111116e-05, "loss": 1.4182, "step": 3410 }, { "epoch": 27.288, "grad_norm": 94.49504852294922, "learning_rate": 4.042666666666667e-05, "loss": 1.0322, "step": 3411 }, { "epoch": 27.296, "grad_norm": 22.128604888916016, "learning_rate": 4.0422222222222225e-05, "loss": 2.1202, "step": 3412 }, { "epoch": 27.304, "grad_norm": 30.668691635131836, "learning_rate": 4.041777777777778e-05, "loss": 0.8809, "step": 3413 }, { "epoch": 27.312, "grad_norm": 30.502561569213867, "learning_rate": 4.0413333333333335e-05, "loss": 1.1931, "step": 3414 }, { "epoch": 27.32, "grad_norm": 22.79041290283203, "learning_rate": 4.040888888888889e-05, "loss": 1.5462, "step": 3415 }, { "epoch": 27.328, "grad_norm": 81.79789733886719, "learning_rate": 4.0404444444444445e-05, "loss": 1.5048, "step": 3416 }, { "epoch": 27.336, "grad_norm": 26.8378849029541, "learning_rate": 4.0400000000000006e-05, "loss": 1.6721, "step": 3417 }, { "epoch": 27.344, "grad_norm": 75.71332550048828, "learning_rate": 4.039555555555556e-05, "loss": 0.9724, "step": 3418 }, { "epoch": 27.352, "grad_norm": 24.28411102294922, "learning_rate": 4.0391111111111116e-05, "loss": 1.2813, "step": 3419 }, { "epoch": 27.36, "grad_norm": 22.719669342041016, "learning_rate": 4.0386666666666664e-05, "loss": 1.8897, "step": 3420 }, { "epoch": 27.368, "grad_norm": 36.232147216796875, "learning_rate": 4.0382222222222226e-05, "loss": 1.1961, "step": 3421 }, { 
"epoch": 27.376, "grad_norm": 26.018062591552734, "learning_rate": 4.037777777777778e-05, "loss": 1.4755, "step": 3422 }, { "epoch": 27.384, "grad_norm": 34.8111572265625, "learning_rate": 4.0373333333333335e-05, "loss": 1.4532, "step": 3423 }, { "epoch": 27.392, "grad_norm": 93.25050354003906, "learning_rate": 4.036888888888889e-05, "loss": 1.2318, "step": 3424 }, { "epoch": 27.4, "grad_norm": 50.37820816040039, "learning_rate": 4.0364444444444445e-05, "loss": 1.5766, "step": 3425 }, { "epoch": 27.408, "grad_norm": 50.20039749145508, "learning_rate": 4.0360000000000007e-05, "loss": 1.4263, "step": 3426 }, { "epoch": 27.416, "grad_norm": 27.888904571533203, "learning_rate": 4.0355555555555555e-05, "loss": 1.5902, "step": 3427 }, { "epoch": 27.424, "grad_norm": 26.0096378326416, "learning_rate": 4.035111111111111e-05, "loss": 1.0315, "step": 3428 }, { "epoch": 27.432, "grad_norm": 60.816986083984375, "learning_rate": 4.0346666666666664e-05, "loss": 2.0478, "step": 3429 }, { "epoch": 27.44, "grad_norm": 62.72433853149414, "learning_rate": 4.0342222222222226e-05, "loss": 2.5229, "step": 3430 }, { "epoch": 27.448, "grad_norm": 18.59469223022461, "learning_rate": 4.033777777777778e-05, "loss": 1.144, "step": 3431 }, { "epoch": 27.456, "grad_norm": 41.63256072998047, "learning_rate": 4.0333333333333336e-05, "loss": 1.7541, "step": 3432 }, { "epoch": 27.464, "grad_norm": 41.21454620361328, "learning_rate": 4.032888888888889e-05, "loss": 1.5038, "step": 3433 }, { "epoch": 27.472, "grad_norm": 42.37861633300781, "learning_rate": 4.0324444444444445e-05, "loss": 0.8727, "step": 3434 }, { "epoch": 27.48, "grad_norm": 24.449460983276367, "learning_rate": 4.032e-05, "loss": 1.0449, "step": 3435 }, { "epoch": 27.488, "grad_norm": 27.601274490356445, "learning_rate": 4.0315555555555555e-05, "loss": 1.1276, "step": 3436 }, { "epoch": 27.496, "grad_norm": 35.51472473144531, "learning_rate": 4.031111111111111e-05, "loss": 1.1634, "step": 3437 }, { "epoch": 27.504, "grad_norm": 
61.50163650512695, "learning_rate": 4.030666666666667e-05, "loss": 1.6869, "step": 3438 }, { "epoch": 27.512, "grad_norm": 59.91775131225586, "learning_rate": 4.0302222222222226e-05, "loss": 1.2309, "step": 3439 }, { "epoch": 27.52, "grad_norm": 70.42138671875, "learning_rate": 4.029777777777778e-05, "loss": 1.1011, "step": 3440 }, { "epoch": 27.528, "grad_norm": 28.68022346496582, "learning_rate": 4.0293333333333336e-05, "loss": 1.1418, "step": 3441 }, { "epoch": 27.536, "grad_norm": 29.804561614990234, "learning_rate": 4.028888888888889e-05, "loss": 1.2826, "step": 3442 }, { "epoch": 27.544, "grad_norm": 91.89447021484375, "learning_rate": 4.0284444444444446e-05, "loss": 1.3503, "step": 3443 }, { "epoch": 27.552, "grad_norm": 39.36943054199219, "learning_rate": 4.028e-05, "loss": 1.3974, "step": 3444 }, { "epoch": 27.56, "grad_norm": 23.727825164794922, "learning_rate": 4.0275555555555555e-05, "loss": 1.2731, "step": 3445 }, { "epoch": 27.568, "grad_norm": 21.656469345092773, "learning_rate": 4.027111111111112e-05, "loss": 1.5317, "step": 3446 }, { "epoch": 27.576, "grad_norm": 45.18101119995117, "learning_rate": 4.026666666666667e-05, "loss": 1.2869, "step": 3447 }, { "epoch": 27.584, "grad_norm": 44.62477493286133, "learning_rate": 4.026222222222223e-05, "loss": 1.6632, "step": 3448 }, { "epoch": 27.592, "grad_norm": 28.44662094116211, "learning_rate": 4.0257777777777775e-05, "loss": 0.8714, "step": 3449 }, { "epoch": 27.6, "grad_norm": 22.678499221801758, "learning_rate": 4.0253333333333336e-05, "loss": 1.3846, "step": 3450 }, { "epoch": 27.608, "grad_norm": 22.897127151489258, "learning_rate": 4.024888888888889e-05, "loss": 1.0966, "step": 3451 }, { "epoch": 27.616, "grad_norm": 17.626264572143555, "learning_rate": 4.0244444444444446e-05, "loss": 1.0921, "step": 3452 }, { "epoch": 27.624, "grad_norm": 60.546875, "learning_rate": 4.024e-05, "loss": 1.335, "step": 3453 }, { "epoch": 27.632, "grad_norm": 20.383737564086914, "learning_rate": 
4.023555555555556e-05, "loss": 1.1927, "step": 3454 }, { "epoch": 27.64, "grad_norm": 54.09278106689453, "learning_rate": 4.023111111111111e-05, "loss": 1.0645, "step": 3455 }, { "epoch": 27.648, "grad_norm": 38.82595443725586, "learning_rate": 4.0226666666666666e-05, "loss": 1.3683, "step": 3456 }, { "epoch": 27.656, "grad_norm": 97.85919189453125, "learning_rate": 4.022222222222222e-05, "loss": 1.9083, "step": 3457 }, { "epoch": 27.664, "grad_norm": 18.517704010009766, "learning_rate": 4.021777777777778e-05, "loss": 1.3728, "step": 3458 }, { "epoch": 27.672, "grad_norm": 20.783203125, "learning_rate": 4.021333333333334e-05, "loss": 1.1505, "step": 3459 }, { "epoch": 27.68, "grad_norm": 65.53314971923828, "learning_rate": 4.020888888888889e-05, "loss": 1.523, "step": 3460 }, { "epoch": 27.688, "grad_norm": 63.54396438598633, "learning_rate": 4.0204444444444447e-05, "loss": 1.2927, "step": 3461 }, { "epoch": 27.696, "grad_norm": 26.09975814819336, "learning_rate": 4.02e-05, "loss": 1.1547, "step": 3462 }, { "epoch": 27.704, "grad_norm": 23.142587661743164, "learning_rate": 4.0195555555555556e-05, "loss": 1.277, "step": 3463 }, { "epoch": 27.712, "grad_norm": 21.87540626525879, "learning_rate": 4.019111111111111e-05, "loss": 1.415, "step": 3464 }, { "epoch": 27.72, "grad_norm": 86.69486999511719, "learning_rate": 4.0186666666666666e-05, "loss": 1.2482, "step": 3465 }, { "epoch": 27.728, "grad_norm": 125.3161849975586, "learning_rate": 4.018222222222223e-05, "loss": 1.2823, "step": 3466 }, { "epoch": 27.736, "grad_norm": 36.08430099487305, "learning_rate": 4.017777777777778e-05, "loss": 1.5768, "step": 3467 }, { "epoch": 27.744, "grad_norm": 117.65678405761719, "learning_rate": 4.017333333333334e-05, "loss": 1.4189, "step": 3468 }, { "epoch": 27.752, "grad_norm": 20.939899444580078, "learning_rate": 4.016888888888889e-05, "loss": 1.1239, "step": 3469 }, { "epoch": 27.76, "grad_norm": 22.65833854675293, "learning_rate": 4.016444444444445e-05, "loss": 1.727, "step": 
3470 }, { "epoch": 27.768, "grad_norm": 32.01997375488281, "learning_rate": 4.016e-05, "loss": 1.1172, "step": 3471 }, { "epoch": 27.776, "grad_norm": 33.04378128051758, "learning_rate": 4.0155555555555557e-05, "loss": 1.1358, "step": 3472 }, { "epoch": 27.784, "grad_norm": 32.15032196044922, "learning_rate": 4.015111111111111e-05, "loss": 1.0289, "step": 3473 }, { "epoch": 27.792, "grad_norm": 172.67623901367188, "learning_rate": 4.014666666666667e-05, "loss": 1.2074, "step": 3474 }, { "epoch": 27.8, "grad_norm": 77.5293960571289, "learning_rate": 4.014222222222223e-05, "loss": 1.6834, "step": 3475 }, { "epoch": 27.808, "grad_norm": 27.545087814331055, "learning_rate": 4.013777777777778e-05, "loss": 1.2299, "step": 3476 }, { "epoch": 27.816, "grad_norm": 33.987361907958984, "learning_rate": 4.013333333333333e-05, "loss": 1.4394, "step": 3477 }, { "epoch": 27.824, "grad_norm": 36.34238052368164, "learning_rate": 4.0128888888888886e-05, "loss": 1.515, "step": 3478 }, { "epoch": 27.832, "grad_norm": 87.22040557861328, "learning_rate": 4.012444444444445e-05, "loss": 2.2259, "step": 3479 }, { "epoch": 27.84, "grad_norm": 44.713680267333984, "learning_rate": 4.012e-05, "loss": 1.2599, "step": 3480 }, { "epoch": 27.848, "grad_norm": 48.97951126098633, "learning_rate": 4.011555555555556e-05, "loss": 1.1663, "step": 3481 }, { "epoch": 27.856, "grad_norm": 21.271879196166992, "learning_rate": 4.011111111111111e-05, "loss": 1.3584, "step": 3482 }, { "epoch": 27.864, "grad_norm": 20.85839080810547, "learning_rate": 4.0106666666666673e-05, "loss": 1.2017, "step": 3483 }, { "epoch": 27.872, "grad_norm": 31.914352416992188, "learning_rate": 4.010222222222222e-05, "loss": 1.4834, "step": 3484 }, { "epoch": 27.88, "grad_norm": 16.57182502746582, "learning_rate": 4.0097777777777776e-05, "loss": 1.1647, "step": 3485 }, { "epoch": 27.888, "grad_norm": 42.13671875, "learning_rate": 4.009333333333333e-05, "loss": 1.4102, "step": 3486 }, { "epoch": 27.896, "grad_norm": 
59.177650451660156, "learning_rate": 4.008888888888889e-05, "loss": 0.9459, "step": 3487 }, { "epoch": 27.904, "grad_norm": 22.546518325805664, "learning_rate": 4.008444444444445e-05, "loss": 1.8042, "step": 3488 }, { "epoch": 27.912, "grad_norm": 17.59703826904297, "learning_rate": 4.008e-05, "loss": 1.2912, "step": 3489 }, { "epoch": 27.92, "grad_norm": 20.168764114379883, "learning_rate": 4.007555555555556e-05, "loss": 1.1563, "step": 3490 }, { "epoch": 27.928, "grad_norm": 23.55995750427246, "learning_rate": 4.007111111111111e-05, "loss": 0.9397, "step": 3491 }, { "epoch": 27.936, "grad_norm": 45.046302795410156, "learning_rate": 4.006666666666667e-05, "loss": 1.3102, "step": 3492 }, { "epoch": 27.944, "grad_norm": 51.438819885253906, "learning_rate": 4.006222222222222e-05, "loss": 1.3385, "step": 3493 }, { "epoch": 27.951999999999998, "grad_norm": 33.27512741088867, "learning_rate": 4.005777777777778e-05, "loss": 1.6316, "step": 3494 }, { "epoch": 27.96, "grad_norm": 22.75728416442871, "learning_rate": 4.005333333333334e-05, "loss": 1.5595, "step": 3495 }, { "epoch": 27.968, "grad_norm": 104.77400970458984, "learning_rate": 4.004888888888889e-05, "loss": 1.8044, "step": 3496 }, { "epoch": 27.976, "grad_norm": 64.77040100097656, "learning_rate": 4.004444444444445e-05, "loss": 1.4359, "step": 3497 }, { "epoch": 27.984, "grad_norm": 25.206430435180664, "learning_rate": 4.004e-05, "loss": 1.4018, "step": 3498 }, { "epoch": 27.992, "grad_norm": 24.342971801757812, "learning_rate": 4.003555555555556e-05, "loss": 0.9515, "step": 3499 }, { "epoch": 28.0, "grad_norm": 127.36905670166016, "learning_rate": 4.003111111111111e-05, "loss": 1.1932, "step": 3500 }, { "epoch": 28.0, "eval_loss": 1.2594845294952393, "eval_map": 0.2903, "eval_map_50": 0.5909, "eval_map_75": 0.2643, "eval_map_Coverall": 0.5771, "eval_map_Face_Shield": 0.2921, "eval_map_Gloves": 0.2035, "eval_map_Goggles": 0.0764, "eval_map_Mask": 0.3025, "eval_map_large": 0.431, "eval_map_medium": 0.1724, 
"eval_map_small": 0.136, "eval_mar_1": 0.2561, "eval_mar_10": 0.4785, "eval_mar_100": 0.4846, "eval_mar_100_Coverall": 0.7378, "eval_mar_100_Face_Shield": 0.5059, "eval_mar_100_Gloves": 0.3607, "eval_mar_100_Goggles": 0.3938, "eval_mar_100_Mask": 0.425, "eval_mar_large": 0.6233, "eval_mar_medium": 0.3412, "eval_mar_small": 0.2393, "eval_runtime": 0.9105, "eval_samples_per_second": 31.85, "eval_steps_per_second": 2.197, "step": 3500 }, { "epoch": 28.008, "grad_norm": 26.96381378173828, "learning_rate": 4.002666666666667e-05, "loss": 1.1295, "step": 3501 }, { "epoch": 28.016, "grad_norm": 30.11985969543457, "learning_rate": 4.002222222222222e-05, "loss": 1.1195, "step": 3502 }, { "epoch": 28.024, "grad_norm": 35.18368148803711, "learning_rate": 4.0017777777777784e-05, "loss": 1.3265, "step": 3503 }, { "epoch": 28.032, "grad_norm": 36.05278396606445, "learning_rate": 4.001333333333334e-05, "loss": 1.6648, "step": 3504 }, { "epoch": 28.04, "grad_norm": 53.561641693115234, "learning_rate": 4.0008888888888894e-05, "loss": 1.2754, "step": 3505 }, { "epoch": 28.048, "grad_norm": 55.94902420043945, "learning_rate": 4.000444444444444e-05, "loss": 1.5033, "step": 3506 }, { "epoch": 28.056, "grad_norm": 26.928104400634766, "learning_rate": 4e-05, "loss": 1.2138, "step": 3507 }, { "epoch": 28.064, "grad_norm": 24.951295852661133, "learning_rate": 3.999555555555556e-05, "loss": 1.0251, "step": 3508 }, { "epoch": 28.072, "grad_norm": 54.23612976074219, "learning_rate": 3.999111111111111e-05, "loss": 2.8731, "step": 3509 }, { "epoch": 28.08, "grad_norm": 39.380916595458984, "learning_rate": 3.998666666666667e-05, "loss": 1.5265, "step": 3510 }, { "epoch": 28.088, "grad_norm": 28.580493927001953, "learning_rate": 3.998222222222223e-05, "loss": 1.0655, "step": 3511 }, { "epoch": 28.096, "grad_norm": 18.17502212524414, "learning_rate": 3.997777777777778e-05, "loss": 1.0465, "step": 3512 }, { "epoch": 28.104, "grad_norm": 36.48805618286133, "learning_rate": 3.997333333333333e-05, 
"loss": 1.1485, "step": 3513 }, { "epoch": 28.112, "grad_norm": 41.05744552612305, "learning_rate": 3.996888888888889e-05, "loss": 1.4979, "step": 3514 }, { "epoch": 28.12, "grad_norm": 31.729393005371094, "learning_rate": 3.996444444444445e-05, "loss": 2.3987, "step": 3515 }, { "epoch": 28.128, "grad_norm": 41.88873291015625, "learning_rate": 3.9960000000000004e-05, "loss": 0.9144, "step": 3516 }, { "epoch": 28.136, "grad_norm": 26.00458335876465, "learning_rate": 3.995555555555556e-05, "loss": 1.6358, "step": 3517 }, { "epoch": 28.144, "grad_norm": 24.380870819091797, "learning_rate": 3.995111111111111e-05, "loss": 1.5161, "step": 3518 }, { "epoch": 28.152, "grad_norm": 25.59930419921875, "learning_rate": 3.994666666666667e-05, "loss": 1.1848, "step": 3519 }, { "epoch": 28.16, "grad_norm": 29.275503158569336, "learning_rate": 3.994222222222222e-05, "loss": 1.0106, "step": 3520 }, { "epoch": 28.168, "grad_norm": 29.125490188598633, "learning_rate": 3.993777777777778e-05, "loss": 1.1946, "step": 3521 }, { "epoch": 28.176, "grad_norm": 44.00692367553711, "learning_rate": 3.993333333333333e-05, "loss": 1.2428, "step": 3522 }, { "epoch": 28.184, "grad_norm": 26.317359924316406, "learning_rate": 3.9928888888888894e-05, "loss": 1.4385, "step": 3523 }, { "epoch": 28.192, "grad_norm": 28.3964786529541, "learning_rate": 3.992444444444445e-05, "loss": 1.1504, "step": 3524 }, { "epoch": 28.2, "grad_norm": 37.584041595458984, "learning_rate": 3.9920000000000004e-05, "loss": 1.1962, "step": 3525 }, { "epoch": 28.208, "grad_norm": 40.662105560302734, "learning_rate": 3.991555555555556e-05, "loss": 1.2224, "step": 3526 }, { "epoch": 28.216, "grad_norm": 19.149240493774414, "learning_rate": 3.9911111111111114e-05, "loss": 1.0583, "step": 3527 }, { "epoch": 28.224, "grad_norm": 90.47821044921875, "learning_rate": 3.990666666666667e-05, "loss": 1.4111, "step": 3528 }, { "epoch": 28.232, "grad_norm": 21.892967224121094, "learning_rate": 3.9902222222222223e-05, "loss": 1.0447, 
"step": 3529 }, { "epoch": 28.24, "grad_norm": 29.94180679321289, "learning_rate": 3.989777777777778e-05, "loss": 0.9767, "step": 3530 }, { "epoch": 28.248, "grad_norm": 60.94210433959961, "learning_rate": 3.989333333333333e-05, "loss": 1.2252, "step": 3531 }, { "epoch": 28.256, "grad_norm": 27.784494400024414, "learning_rate": 3.9888888888888895e-05, "loss": 1.3913, "step": 3532 }, { "epoch": 28.264, "grad_norm": 23.693605422973633, "learning_rate": 3.988444444444445e-05, "loss": 1.366, "step": 3533 }, { "epoch": 28.272, "grad_norm": 37.278343200683594, "learning_rate": 3.988e-05, "loss": 1.1465, "step": 3534 }, { "epoch": 28.28, "grad_norm": 34.63630676269531, "learning_rate": 3.987555555555555e-05, "loss": 0.8452, "step": 3535 }, { "epoch": 28.288, "grad_norm": 21.267858505249023, "learning_rate": 3.9871111111111114e-05, "loss": 1.1627, "step": 3536 }, { "epoch": 28.296, "grad_norm": 34.97475814819336, "learning_rate": 3.986666666666667e-05, "loss": 1.6236, "step": 3537 }, { "epoch": 28.304, "grad_norm": 31.820209503173828, "learning_rate": 3.9862222222222224e-05, "loss": 1.55, "step": 3538 }, { "epoch": 28.312, "grad_norm": 68.52306365966797, "learning_rate": 3.985777777777778e-05, "loss": 1.4331, "step": 3539 }, { "epoch": 28.32, "grad_norm": 89.99808502197266, "learning_rate": 3.985333333333334e-05, "loss": 1.6314, "step": 3540 }, { "epoch": 28.328, "grad_norm": 29.687475204467773, "learning_rate": 3.984888888888889e-05, "loss": 1.1805, "step": 3541 }, { "epoch": 28.336, "grad_norm": 19.617238998413086, "learning_rate": 3.984444444444444e-05, "loss": 1.1256, "step": 3542 }, { "epoch": 28.344, "grad_norm": 21.21929168701172, "learning_rate": 3.984e-05, "loss": 0.8211, "step": 3543 }, { "epoch": 28.352, "grad_norm": 41.79854202270508, "learning_rate": 3.983555555555556e-05, "loss": 1.5074, "step": 3544 }, { "epoch": 28.36, "grad_norm": 17.963356018066406, "learning_rate": 3.9831111111111114e-05, "loss": 1.0729, "step": 3545 }, { "epoch": 28.368, "grad_norm": 
30.935546875, "learning_rate": 3.982666666666667e-05, "loss": 1.1634, "step": 3546 }, { "epoch": 28.376, "grad_norm": 34.746585845947266, "learning_rate": 3.9822222222222224e-05, "loss": 2.1067, "step": 3547 }, { "epoch": 28.384, "grad_norm": 135.1561737060547, "learning_rate": 3.981777777777778e-05, "loss": 0.9619, "step": 3548 }, { "epoch": 28.392, "grad_norm": 19.620399475097656, "learning_rate": 3.9813333333333334e-05, "loss": 0.9977, "step": 3549 }, { "epoch": 28.4, "grad_norm": 16.659059524536133, "learning_rate": 3.980888888888889e-05, "loss": 1.4288, "step": 3550 }, { "epoch": 28.408, "grad_norm": 28.380691528320312, "learning_rate": 3.9804444444444444e-05, "loss": 1.2231, "step": 3551 }, { "epoch": 28.416, "grad_norm": 28.05398941040039, "learning_rate": 3.9800000000000005e-05, "loss": 0.9468, "step": 3552 }, { "epoch": 28.424, "grad_norm": 56.166038513183594, "learning_rate": 3.979555555555556e-05, "loss": 1.0484, "step": 3553 }, { "epoch": 28.432, "grad_norm": 334.9261169433594, "learning_rate": 3.9791111111111115e-05, "loss": 1.385, "step": 3554 }, { "epoch": 28.44, "grad_norm": 21.525197982788086, "learning_rate": 3.978666666666667e-05, "loss": 0.945, "step": 3555 }, { "epoch": 28.448, "grad_norm": 25.345367431640625, "learning_rate": 3.9782222222222225e-05, "loss": 0.9777, "step": 3556 }, { "epoch": 28.456, "grad_norm": 56.77970886230469, "learning_rate": 3.977777777777778e-05, "loss": 1.8755, "step": 3557 }, { "epoch": 28.464, "grad_norm": 39.22376251220703, "learning_rate": 3.9773333333333334e-05, "loss": 1.297, "step": 3558 }, { "epoch": 28.472, "grad_norm": 27.812301635742188, "learning_rate": 3.976888888888889e-05, "loss": 1.4139, "step": 3559 }, { "epoch": 28.48, "grad_norm": 57.64317321777344, "learning_rate": 3.976444444444445e-05, "loss": 1.5364, "step": 3560 }, { "epoch": 28.488, "grad_norm": 13.889812469482422, "learning_rate": 3.9760000000000006e-05, "loss": 1.0469, "step": 3561 }, { "epoch": 28.496, "grad_norm": 19.21933364868164, 
"learning_rate": 3.9755555555555554e-05, "loss": 1.4425, "step": 3562 }, { "epoch": 28.504, "grad_norm": 31.575162887573242, "learning_rate": 3.975111111111111e-05, "loss": 1.386, "step": 3563 }, { "epoch": 28.512, "grad_norm": 50.21656036376953, "learning_rate": 3.974666666666667e-05, "loss": 2.7133, "step": 3564 }, { "epoch": 28.52, "grad_norm": 43.208980560302734, "learning_rate": 3.9742222222222225e-05, "loss": 0.8036, "step": 3565 }, { "epoch": 28.528, "grad_norm": 14.72153091430664, "learning_rate": 3.973777777777778e-05, "loss": 1.0666, "step": 3566 }, { "epoch": 28.536, "grad_norm": 18.787137985229492, "learning_rate": 3.9733333333333335e-05, "loss": 1.6582, "step": 3567 }, { "epoch": 28.544, "grad_norm": 28.834081649780273, "learning_rate": 3.9728888888888896e-05, "loss": 0.9739, "step": 3568 }, { "epoch": 28.552, "grad_norm": 45.34379577636719, "learning_rate": 3.9724444444444444e-05, "loss": 2.3944, "step": 3569 }, { "epoch": 28.56, "grad_norm": 26.08582878112793, "learning_rate": 3.972e-05, "loss": 1.0837, "step": 3570 }, { "epoch": 28.568, "grad_norm": 32.45763397216797, "learning_rate": 3.9715555555555554e-05, "loss": 1.3624, "step": 3571 }, { "epoch": 28.576, "grad_norm": 18.594539642333984, "learning_rate": 3.9711111111111116e-05, "loss": 1.4037, "step": 3572 }, { "epoch": 28.584, "grad_norm": 29.80754852294922, "learning_rate": 3.970666666666667e-05, "loss": 1.1701, "step": 3573 }, { "epoch": 28.592, "grad_norm": 27.27749252319336, "learning_rate": 3.9702222222222225e-05, "loss": 1.1422, "step": 3574 }, { "epoch": 28.6, "grad_norm": 18.644887924194336, "learning_rate": 3.969777777777778e-05, "loss": 1.4509, "step": 3575 }, { "epoch": 28.608, "grad_norm": 23.733922958374023, "learning_rate": 3.9693333333333335e-05, "loss": 1.3956, "step": 3576 }, { "epoch": 28.616, "grad_norm": 47.27302551269531, "learning_rate": 3.968888888888889e-05, "loss": 1.6729, "step": 3577 }, { "epoch": 28.624, "grad_norm": 14.108521461486816, "learning_rate": 
3.9684444444444445e-05, "loss": 1.0224, "step": 3578 }, { "epoch": 28.632, "grad_norm": 22.42069435119629, "learning_rate": 3.968e-05, "loss": 1.0947, "step": 3579 }, { "epoch": 28.64, "grad_norm": 26.943096160888672, "learning_rate": 3.9675555555555554e-05, "loss": 1.1436, "step": 3580 }, { "epoch": 28.648, "grad_norm": 28.23189353942871, "learning_rate": 3.9671111111111116e-05, "loss": 1.241, "step": 3581 }, { "epoch": 28.656, "grad_norm": 85.68072509765625, "learning_rate": 3.966666666666667e-05, "loss": 1.0788, "step": 3582 }, { "epoch": 28.664, "grad_norm": 41.033714294433594, "learning_rate": 3.9662222222222226e-05, "loss": 1.21, "step": 3583 }, { "epoch": 28.672, "grad_norm": 35.79884719848633, "learning_rate": 3.9657777777777774e-05, "loss": 1.2553, "step": 3584 }, { "epoch": 28.68, "grad_norm": 1047.1650390625, "learning_rate": 3.9653333333333335e-05, "loss": 1.494, "step": 3585 }, { "epoch": 28.688, "grad_norm": 39.191890716552734, "learning_rate": 3.964888888888889e-05, "loss": 1.217, "step": 3586 }, { "epoch": 28.696, "grad_norm": 22.659549713134766, "learning_rate": 3.9644444444444445e-05, "loss": 1.7659, "step": 3587 }, { "epoch": 28.704, "grad_norm": 36.58673858642578, "learning_rate": 3.964e-05, "loss": 1.599, "step": 3588 }, { "epoch": 28.712, "grad_norm": 33.436702728271484, "learning_rate": 3.963555555555556e-05, "loss": 1.1482, "step": 3589 }, { "epoch": 28.72, "grad_norm": 51.98210144042969, "learning_rate": 3.9631111111111116e-05, "loss": 1.2978, "step": 3590 }, { "epoch": 28.728, "grad_norm": 133.3426513671875, "learning_rate": 3.9626666666666664e-05, "loss": 1.3257, "step": 3591 }, { "epoch": 28.736, "grad_norm": 26.673831939697266, "learning_rate": 3.962222222222222e-05, "loss": 1.2036, "step": 3592 }, { "epoch": 28.744, "grad_norm": 24.55518341064453, "learning_rate": 3.961777777777778e-05, "loss": 1.0517, "step": 3593 }, { "epoch": 28.752, "grad_norm": 21.461395263671875, "learning_rate": 3.9613333333333336e-05, "loss": 0.9338, "step": 
3594 }, { "epoch": 28.76, "grad_norm": 146.7390594482422, "learning_rate": 3.960888888888889e-05, "loss": 1.2595, "step": 3595 }, { "epoch": 28.768, "grad_norm": 27.72336196899414, "learning_rate": 3.9604444444444445e-05, "loss": 1.1475, "step": 3596 }, { "epoch": 28.776, "grad_norm": 41.452816009521484, "learning_rate": 3.960000000000001e-05, "loss": 1.538, "step": 3597 }, { "epoch": 28.784, "grad_norm": 31.38089942932129, "learning_rate": 3.9595555555555555e-05, "loss": 1.7542, "step": 3598 }, { "epoch": 28.792, "grad_norm": 38.753875732421875, "learning_rate": 3.959111111111111e-05, "loss": 1.0883, "step": 3599 }, { "epoch": 28.8, "grad_norm": 47.395084381103516, "learning_rate": 3.9586666666666665e-05, "loss": 1.4041, "step": 3600 }, { "epoch": 28.808, "grad_norm": 39.76588439941406, "learning_rate": 3.9582222222222226e-05, "loss": 1.0675, "step": 3601 }, { "epoch": 28.816, "grad_norm": 48.839820861816406, "learning_rate": 3.957777777777778e-05, "loss": 0.8558, "step": 3602 }, { "epoch": 28.824, "grad_norm": 33.58424377441406, "learning_rate": 3.9573333333333336e-05, "loss": 1.9958, "step": 3603 }, { "epoch": 28.832, "grad_norm": 22.258384704589844, "learning_rate": 3.956888888888889e-05, "loss": 1.2592, "step": 3604 }, { "epoch": 28.84, "grad_norm": 38.20815658569336, "learning_rate": 3.9564444444444446e-05, "loss": 1.0951, "step": 3605 }, { "epoch": 28.848, "grad_norm": 27.691226959228516, "learning_rate": 3.956e-05, "loss": 1.1389, "step": 3606 }, { "epoch": 28.856, "grad_norm": 41.67795944213867, "learning_rate": 3.9555555555555556e-05, "loss": 1.1642, "step": 3607 }, { "epoch": 28.864, "grad_norm": 30.433319091796875, "learning_rate": 3.955111111111111e-05, "loss": 1.1921, "step": 3608 }, { "epoch": 28.872, "grad_norm": 37.516353607177734, "learning_rate": 3.954666666666667e-05, "loss": 1.2096, "step": 3609 }, { "epoch": 28.88, "grad_norm": 44.03755569458008, "learning_rate": 3.954222222222223e-05, "loss": 1.085, "step": 3610 }, { "epoch": 28.888, 
"grad_norm": 27.19696617126465, "learning_rate": 3.953777777777778e-05, "loss": 1.6344, "step": 3611 }, { "epoch": 28.896, "grad_norm": 49.15432357788086, "learning_rate": 3.9533333333333337e-05, "loss": 1.1732, "step": 3612 }, { "epoch": 28.904, "grad_norm": 23.273542404174805, "learning_rate": 3.952888888888889e-05, "loss": 0.9137, "step": 3613 }, { "epoch": 28.912, "grad_norm": 29.055234909057617, "learning_rate": 3.9524444444444446e-05, "loss": 0.9739, "step": 3614 }, { "epoch": 28.92, "grad_norm": 19.718647003173828, "learning_rate": 3.952e-05, "loss": 1.7115, "step": 3615 }, { "epoch": 28.928, "grad_norm": 27.97213363647461, "learning_rate": 3.9515555555555556e-05, "loss": 1.0227, "step": 3616 }, { "epoch": 28.936, "grad_norm": 42.071109771728516, "learning_rate": 3.951111111111112e-05, "loss": 0.9273, "step": 3617 }, { "epoch": 28.944, "grad_norm": 27.93745231628418, "learning_rate": 3.950666666666667e-05, "loss": 2.4645, "step": 3618 }, { "epoch": 28.951999999999998, "grad_norm": 31.420839309692383, "learning_rate": 3.950222222222222e-05, "loss": 1.275, "step": 3619 }, { "epoch": 28.96, "grad_norm": 67.93079376220703, "learning_rate": 3.9497777777777775e-05, "loss": 1.1494, "step": 3620 }, { "epoch": 28.968, "grad_norm": 67.84270477294922, "learning_rate": 3.949333333333334e-05, "loss": 1.2249, "step": 3621 }, { "epoch": 28.976, "grad_norm": 142.3758087158203, "learning_rate": 3.948888888888889e-05, "loss": 0.8559, "step": 3622 }, { "epoch": 28.984, "grad_norm": 40.76852035522461, "learning_rate": 3.948444444444445e-05, "loss": 1.348, "step": 3623 }, { "epoch": 28.992, "grad_norm": 36.22665786743164, "learning_rate": 3.948e-05, "loss": 0.9644, "step": 3624 }, { "epoch": 29.0, "grad_norm": 48.63462829589844, "learning_rate": 3.947555555555556e-05, "loss": 1.1789, "step": 3625 }, { "epoch": 29.0, "eval_loss": 1.2498313188552856, "eval_map": 0.2987, "eval_map_50": 0.619, "eval_map_75": 0.2752, "eval_map_Coverall": 0.5368, "eval_map_Face_Shield": 0.3627, 
"eval_map_Gloves": 0.1817, "eval_map_Goggles": 0.1029, "eval_map_Mask": 0.3095, "eval_map_large": 0.4874, "eval_map_medium": 0.2695, "eval_map_small": 0.1606, "eval_mar_1": 0.2537, "eval_mar_10": 0.4854, "eval_mar_100": 0.5226, "eval_mar_100_Coverall": 0.7156, "eval_mar_100_Face_Shield": 0.6471, "eval_mar_100_Gloves": 0.359, "eval_mar_100_Goggles": 0.4625, "eval_mar_100_Mask": 0.4288, "eval_mar_large": 0.6925, "eval_mar_medium": 0.4634, "eval_mar_small": 0.2651, "eval_runtime": 0.9072, "eval_samples_per_second": 31.968, "eval_steps_per_second": 2.205, "step": 3625 }, { "epoch": 29.008, "grad_norm": 21.270639419555664, "learning_rate": 3.947111111111111e-05, "loss": 1.0199, "step": 3626 }, { "epoch": 29.016, "grad_norm": 20.384960174560547, "learning_rate": 3.9466666666666666e-05, "loss": 1.0569, "step": 3627 }, { "epoch": 29.024, "grad_norm": 45.16531753540039, "learning_rate": 3.946222222222222e-05, "loss": 1.2948, "step": 3628 }, { "epoch": 29.032, "grad_norm": 67.20779418945312, "learning_rate": 3.945777777777778e-05, "loss": 1.2051, "step": 3629 }, { "epoch": 29.04, "grad_norm": 34.31087875366211, "learning_rate": 3.945333333333334e-05, "loss": 1.3369, "step": 3630 }, { "epoch": 29.048, "grad_norm": 46.05363464355469, "learning_rate": 3.944888888888889e-05, "loss": 1.0392, "step": 3631 }, { "epoch": 29.056, "grad_norm": 33.44265365600586, "learning_rate": 3.944444444444445e-05, "loss": 1.3997, "step": 3632 }, { "epoch": 29.064, "grad_norm": 43.6870231628418, "learning_rate": 3.944e-05, "loss": 0.9733, "step": 3633 }, { "epoch": 29.072, "grad_norm": 22.399459838867188, "learning_rate": 3.943555555555556e-05, "loss": 1.6149, "step": 3634 }, { "epoch": 29.08, "grad_norm": 28.278968811035156, "learning_rate": 3.943111111111111e-05, "loss": 1.465, "step": 3635 }, { "epoch": 29.088, "grad_norm": 31.67029571533203, "learning_rate": 3.9426666666666666e-05, "loss": 0.9107, "step": 3636 }, { "epoch": 29.096, "grad_norm": 106.9617919921875, "learning_rate": 
3.942222222222222e-05, "loss": 2.5308, "step": 3637 }, { "epoch": 29.104, "grad_norm": 29.838516235351562, "learning_rate": 3.941777777777778e-05, "loss": 1.168, "step": 3638 }, { "epoch": 29.112, "grad_norm": 80.24748229980469, "learning_rate": 3.941333333333334e-05, "loss": 1.0844, "step": 3639 }, { "epoch": 29.12, "grad_norm": 36.3046989440918, "learning_rate": 3.940888888888889e-05, "loss": 1.1432, "step": 3640 }, { "epoch": 29.128, "grad_norm": 36.18122863769531, "learning_rate": 3.940444444444444e-05, "loss": 1.0228, "step": 3641 }, { "epoch": 29.136, "grad_norm": 34.24589538574219, "learning_rate": 3.94e-05, "loss": 2.2346, "step": 3642 }, { "epoch": 29.144, "grad_norm": 41.998294830322266, "learning_rate": 3.939555555555556e-05, "loss": 1.7074, "step": 3643 }, { "epoch": 29.152, "grad_norm": 104.5133285522461, "learning_rate": 3.939111111111111e-05, "loss": 1.0476, "step": 3644 }, { "epoch": 29.16, "grad_norm": 62.49032974243164, "learning_rate": 3.938666666666667e-05, "loss": 1.2744, "step": 3645 }, { "epoch": 29.168, "grad_norm": 16.75449562072754, "learning_rate": 3.938222222222223e-05, "loss": 1.004, "step": 3646 }, { "epoch": 29.176, "grad_norm": 16.06818389892578, "learning_rate": 3.937777777777778e-05, "loss": 1.0927, "step": 3647 }, { "epoch": 29.184, "grad_norm": 15.532025337219238, "learning_rate": 3.937333333333333e-05, "loss": 1.0911, "step": 3648 }, { "epoch": 29.192, "grad_norm": 31.3184871673584, "learning_rate": 3.9368888888888886e-05, "loss": 1.2901, "step": 3649 }, { "epoch": 29.2, "grad_norm": 31.130342483520508, "learning_rate": 3.936444444444445e-05, "loss": 1.0836, "step": 3650 }, { "epoch": 29.208, "grad_norm": 20.698999404907227, "learning_rate": 3.936e-05, "loss": 0.9844, "step": 3651 }, { "epoch": 29.216, "grad_norm": 27.07410430908203, "learning_rate": 3.935555555555556e-05, "loss": 0.9023, "step": 3652 }, { "epoch": 29.224, "grad_norm": 25.398223876953125, "learning_rate": 3.935111111111111e-05, "loss": 1.1211, "step": 3653 }, { 
"epoch": 29.232, "grad_norm": 23.4410457611084, "learning_rate": 3.9346666666666674e-05, "loss": 1.4994, "step": 3654 }, { "epoch": 29.24, "grad_norm": 27.382282257080078, "learning_rate": 3.934222222222222e-05, "loss": 0.9428, "step": 3655 }, { "epoch": 29.248, "grad_norm": 70.03271484375, "learning_rate": 3.933777777777778e-05, "loss": 0.9713, "step": 3656 }, { "epoch": 29.256, "grad_norm": 33.73957824707031, "learning_rate": 3.933333333333333e-05, "loss": 1.0989, "step": 3657 }, { "epoch": 29.264, "grad_norm": 26.73456573486328, "learning_rate": 3.932888888888889e-05, "loss": 1.324, "step": 3658 }, { "epoch": 29.272, "grad_norm": 24.64491844177246, "learning_rate": 3.932444444444445e-05, "loss": 1.0142, "step": 3659 }, { "epoch": 29.28, "grad_norm": 28.201522827148438, "learning_rate": 3.932e-05, "loss": 1.6254, "step": 3660 }, { "epoch": 29.288, "grad_norm": 26.399696350097656, "learning_rate": 3.931555555555556e-05, "loss": 1.3324, "step": 3661 }, { "epoch": 29.296, "grad_norm": 24.28162384033203, "learning_rate": 3.931111111111111e-05, "loss": 1.175, "step": 3662 }, { "epoch": 29.304, "grad_norm": 38.905601501464844, "learning_rate": 3.930666666666667e-05, "loss": 1.1974, "step": 3663 }, { "epoch": 29.312, "grad_norm": 48.302207946777344, "learning_rate": 3.930222222222222e-05, "loss": 1.3087, "step": 3664 }, { "epoch": 29.32, "grad_norm": 44.38668441772461, "learning_rate": 3.929777777777778e-05, "loss": 1.4552, "step": 3665 }, { "epoch": 29.328, "grad_norm": 26.44339942932129, "learning_rate": 3.929333333333334e-05, "loss": 1.3023, "step": 3666 }, { "epoch": 29.336, "grad_norm": 19.5550594329834, "learning_rate": 3.9288888888888894e-05, "loss": 1.2162, "step": 3667 }, { "epoch": 29.344, "grad_norm": 21.498245239257812, "learning_rate": 3.928444444444445e-05, "loss": 0.9615, "step": 3668 }, { "epoch": 29.352, "grad_norm": 22.491748809814453, "learning_rate": 3.9280000000000003e-05, "loss": 0.9169, "step": 3669 }, { "epoch": 29.36, "grad_norm": 
21.876611709594727, "learning_rate": 3.927555555555556e-05, "loss": 0.8766, "step": 3670 }, { "epoch": 29.368, "grad_norm": 33.537418365478516, "learning_rate": 3.927111111111111e-05, "loss": 0.9353, "step": 3671 }, { "epoch": 29.376, "grad_norm": 18.938251495361328, "learning_rate": 3.926666666666667e-05, "loss": 1.1537, "step": 3672 }, { "epoch": 29.384, "grad_norm": 17.32370948791504, "learning_rate": 3.926222222222222e-05, "loss": 2.2716, "step": 3673 }, { "epoch": 29.392, "grad_norm": 50.305992126464844, "learning_rate": 3.9257777777777784e-05, "loss": 1.3164, "step": 3674 }, { "epoch": 29.4, "grad_norm": 77.80621337890625, "learning_rate": 3.925333333333334e-05, "loss": 1.1695, "step": 3675 }, { "epoch": 29.408, "grad_norm": 18.575613021850586, "learning_rate": 3.924888888888889e-05, "loss": 1.2252, "step": 3676 }, { "epoch": 29.416, "grad_norm": 26.385879516601562, "learning_rate": 3.924444444444444e-05, "loss": 1.3303, "step": 3677 }, { "epoch": 29.424, "grad_norm": 28.178014755249023, "learning_rate": 3.9240000000000004e-05, "loss": 1.6032, "step": 3678 }, { "epoch": 29.432, "grad_norm": 21.596839904785156, "learning_rate": 3.923555555555556e-05, "loss": 1.6152, "step": 3679 }, { "epoch": 29.44, "grad_norm": 26.399700164794922, "learning_rate": 3.9231111111111113e-05, "loss": 1.5229, "step": 3680 }, { "epoch": 29.448, "grad_norm": 20.661510467529297, "learning_rate": 3.922666666666667e-05, "loss": 1.1869, "step": 3681 }, { "epoch": 29.456, "grad_norm": 23.295501708984375, "learning_rate": 3.922222222222223e-05, "loss": 1.1243, "step": 3682 }, { "epoch": 29.464, "grad_norm": 37.71562576293945, "learning_rate": 3.921777777777778e-05, "loss": 1.296, "step": 3683 }, { "epoch": 29.472, "grad_norm": 35.7359733581543, "learning_rate": 3.921333333333333e-05, "loss": 1.5487, "step": 3684 }, { "epoch": 29.48, "grad_norm": 20.059776306152344, "learning_rate": 3.920888888888889e-05, "loss": 1.3285, "step": 3685 }, { "epoch": 29.488, "grad_norm": 35.015419006347656, 
"learning_rate": 3.920444444444444e-05, "loss": 1.4131, "step": 3686 }, { "epoch": 29.496, "grad_norm": 23.95197296142578, "learning_rate": 3.9200000000000004e-05, "loss": 1.1074, "step": 3687 }, { "epoch": 29.504, "grad_norm": 28.2961368560791, "learning_rate": 3.919555555555556e-05, "loss": 1.1659, "step": 3688 }, { "epoch": 29.512, "grad_norm": 20.989294052124023, "learning_rate": 3.9191111111111114e-05, "loss": 1.1657, "step": 3689 }, { "epoch": 29.52, "grad_norm": 38.15180587768555, "learning_rate": 3.918666666666667e-05, "loss": 1.1733, "step": 3690 }, { "epoch": 29.528, "grad_norm": 16.91005516052246, "learning_rate": 3.9182222222222224e-05, "loss": 0.8295, "step": 3691 }, { "epoch": 29.536, "grad_norm": 26.4747314453125, "learning_rate": 3.917777777777778e-05, "loss": 1.412, "step": 3692 }, { "epoch": 29.544, "grad_norm": 12.938889503479004, "learning_rate": 3.917333333333333e-05, "loss": 0.9727, "step": 3693 }, { "epoch": 29.552, "grad_norm": 25.979097366333008, "learning_rate": 3.916888888888889e-05, "loss": 1.2447, "step": 3694 }, { "epoch": 29.56, "grad_norm": 30.463056564331055, "learning_rate": 3.916444444444445e-05, "loss": 1.1577, "step": 3695 }, { "epoch": 29.568, "grad_norm": 28.2266902923584, "learning_rate": 3.9160000000000005e-05, "loss": 1.8185, "step": 3696 }, { "epoch": 29.576, "grad_norm": 27.265487670898438, "learning_rate": 3.915555555555556e-05, "loss": 1.2972, "step": 3697 }, { "epoch": 29.584, "grad_norm": 27.35637092590332, "learning_rate": 3.915111111111111e-05, "loss": 1.3662, "step": 3698 }, { "epoch": 29.592, "grad_norm": 30.551851272583008, "learning_rate": 3.914666666666667e-05, "loss": 1.1694, "step": 3699 }, { "epoch": 29.6, "grad_norm": 43.819175720214844, "learning_rate": 3.9142222222222224e-05, "loss": 1.2884, "step": 3700 }, { "epoch": 29.608, "grad_norm": 56.977352142333984, "learning_rate": 3.913777777777778e-05, "loss": 1.3247, "step": 3701 }, { "epoch": 29.616, "grad_norm": 32.810890197753906, "learning_rate": 
3.9133333333333334e-05, "loss": 0.912, "step": 3702 }, { "epoch": 29.624, "grad_norm": 44.93788146972656, "learning_rate": 3.9128888888888895e-05, "loss": 1.4334, "step": 3703 }, { "epoch": 29.632, "grad_norm": 28.052202224731445, "learning_rate": 3.912444444444445e-05, "loss": 0.7822, "step": 3704 }, { "epoch": 29.64, "grad_norm": 34.5224723815918, "learning_rate": 3.912e-05, "loss": 1.6142, "step": 3705 }, { "epoch": 29.648, "grad_norm": 59.31552505493164, "learning_rate": 3.911555555555555e-05, "loss": 1.1423, "step": 3706 }, { "epoch": 29.656, "grad_norm": 23.41785430908203, "learning_rate": 3.9111111111111115e-05, "loss": 0.9104, "step": 3707 }, { "epoch": 29.664, "grad_norm": 45.67373275756836, "learning_rate": 3.910666666666667e-05, "loss": 1.8626, "step": 3708 }, { "epoch": 29.672, "grad_norm": 24.328514099121094, "learning_rate": 3.9102222222222224e-05, "loss": 0.9922, "step": 3709 }, { "epoch": 29.68, "grad_norm": 160.4320831298828, "learning_rate": 3.909777777777778e-05, "loss": 1.3353, "step": 3710 }, { "epoch": 29.688, "grad_norm": 32.86115646362305, "learning_rate": 3.9093333333333334e-05, "loss": 1.3418, "step": 3711 }, { "epoch": 29.696, "grad_norm": 22.038894653320312, "learning_rate": 3.908888888888889e-05, "loss": 1.2272, "step": 3712 }, { "epoch": 29.704, "grad_norm": 25.55476188659668, "learning_rate": 3.9084444444444444e-05, "loss": 1.1193, "step": 3713 }, { "epoch": 29.712, "grad_norm": 44.57835006713867, "learning_rate": 3.908e-05, "loss": 1.1034, "step": 3714 }, { "epoch": 29.72, "grad_norm": 26.38947296142578, "learning_rate": 3.907555555555556e-05, "loss": 1.2476, "step": 3715 }, { "epoch": 29.728, "grad_norm": 114.98371124267578, "learning_rate": 3.9071111111111115e-05, "loss": 1.0378, "step": 3716 }, { "epoch": 29.736, "grad_norm": 62.36959457397461, "learning_rate": 3.906666666666667e-05, "loss": 1.3707, "step": 3717 }, { "epoch": 29.744, "grad_norm": 24.121715545654297, "learning_rate": 3.9062222222222225e-05, "loss": 0.9683, "step": 
3718 }, { "epoch": 29.752, "grad_norm": 34.49618911743164, "learning_rate": 3.905777777777778e-05, "loss": 1.1544, "step": 3719 }, { "epoch": 29.76, "grad_norm": 37.35868835449219, "learning_rate": 3.9053333333333334e-05, "loss": 1.2385, "step": 3720 }, { "epoch": 29.768, "grad_norm": 38.83354187011719, "learning_rate": 3.904888888888889e-05, "loss": 1.7026, "step": 3721 }, { "epoch": 29.776, "grad_norm": 562.3043212890625, "learning_rate": 3.9044444444444444e-05, "loss": 1.0474, "step": 3722 }, { "epoch": 29.784, "grad_norm": 17.17640495300293, "learning_rate": 3.9040000000000006e-05, "loss": 1.4873, "step": 3723 }, { "epoch": 29.792, "grad_norm": 47.52167510986328, "learning_rate": 3.903555555555556e-05, "loss": 0.9692, "step": 3724 }, { "epoch": 29.8, "grad_norm": 46.223304748535156, "learning_rate": 3.9031111111111115e-05, "loss": 1.3847, "step": 3725 }, { "epoch": 29.808, "grad_norm": 31.39592742919922, "learning_rate": 3.902666666666667e-05, "loss": 1.0112, "step": 3726 }, { "epoch": 29.816, "grad_norm": 22.76755714416504, "learning_rate": 3.9022222222222225e-05, "loss": 1.2274, "step": 3727 }, { "epoch": 29.824, "grad_norm": 48.4031867980957, "learning_rate": 3.901777777777778e-05, "loss": 1.197, "step": 3728 }, { "epoch": 29.832, "grad_norm": 70.0137939453125, "learning_rate": 3.9013333333333335e-05, "loss": 1.5323, "step": 3729 }, { "epoch": 29.84, "grad_norm": 23.24966812133789, "learning_rate": 3.900888888888889e-05, "loss": 2.1767, "step": 3730 }, { "epoch": 29.848, "grad_norm": 30.40225601196289, "learning_rate": 3.900444444444445e-05, "loss": 1.8077, "step": 3731 }, { "epoch": 29.856, "grad_norm": 38.226661682128906, "learning_rate": 3.9000000000000006e-05, "loss": 1.1315, "step": 3732 }, { "epoch": 29.864, "grad_norm": 31.640905380249023, "learning_rate": 3.8995555555555554e-05, "loss": 1.5231, "step": 3733 }, { "epoch": 29.872, "grad_norm": 28.891098022460938, "learning_rate": 3.899111111111111e-05, "loss": 1.3272, "step": 3734 }, { "epoch": 29.88, 
"grad_norm": 54.85975646972656, "learning_rate": 3.8986666666666664e-05, "loss": 1.1451, "step": 3735 }, { "epoch": 29.888, "grad_norm": 14.174991607666016, "learning_rate": 3.8982222222222225e-05, "loss": 0.8883, "step": 3736 }, { "epoch": 29.896, "grad_norm": 286.6719055175781, "learning_rate": 3.897777777777778e-05, "loss": 1.4995, "step": 3737 }, { "epoch": 29.904, "grad_norm": 43.87775421142578, "learning_rate": 3.8973333333333335e-05, "loss": 2.7987, "step": 3738 }, { "epoch": 29.912, "grad_norm": 24.39453125, "learning_rate": 3.896888888888889e-05, "loss": 1.3382, "step": 3739 }, { "epoch": 29.92, "grad_norm": 20.810184478759766, "learning_rate": 3.8964444444444445e-05, "loss": 0.8836, "step": 3740 }, { "epoch": 29.928, "grad_norm": 31.959726333618164, "learning_rate": 3.896e-05, "loss": 0.7029, "step": 3741 }, { "epoch": 29.936, "grad_norm": 70.57742309570312, "learning_rate": 3.8955555555555555e-05, "loss": 1.6199, "step": 3742 }, { "epoch": 29.944, "grad_norm": 51.839256286621094, "learning_rate": 3.895111111111111e-05, "loss": 1.071, "step": 3743 }, { "epoch": 29.951999999999998, "grad_norm": 40.25831985473633, "learning_rate": 3.894666666666667e-05, "loss": 1.2595, "step": 3744 }, { "epoch": 29.96, "grad_norm": 49.659297943115234, "learning_rate": 3.8942222222222226e-05, "loss": 1.7814, "step": 3745 }, { "epoch": 29.968, "grad_norm": 282.8426818847656, "learning_rate": 3.893777777777778e-05, "loss": 1.4854, "step": 3746 }, { "epoch": 29.976, "grad_norm": 29.120229721069336, "learning_rate": 3.8933333333333336e-05, "loss": 1.2698, "step": 3747 }, { "epoch": 29.984, "grad_norm": 36.19181442260742, "learning_rate": 3.892888888888889e-05, "loss": 1.5027, "step": 3748 }, { "epoch": 29.992, "grad_norm": 39.437156677246094, "learning_rate": 3.8924444444444445e-05, "loss": 1.2857, "step": 3749 }, { "epoch": 30.0, "grad_norm": 57.102500915527344, "learning_rate": 3.892e-05, "loss": 1.185, "step": 3750 }, { "epoch": 30.0, "eval_loss": 1.340280294418335, 
"eval_map": 0.2659, "eval_map_50": 0.5572, "eval_map_75": 0.2052, "eval_map_Coverall": 0.4924, "eval_map_Face_Shield": 0.2557, "eval_map_Gloves": 0.2118, "eval_map_Goggles": 0.1286, "eval_map_Mask": 0.241, "eval_map_large": 0.4568, "eval_map_medium": 0.1622, "eval_map_small": 0.1514, "eval_mar_1": 0.2527, "eval_mar_10": 0.4691, "eval_mar_100": 0.4925, "eval_mar_100_Coverall": 0.6489, "eval_mar_100_Face_Shield": 0.6235, "eval_mar_100_Gloves": 0.3738, "eval_mar_100_Goggles": 0.4375, "eval_mar_100_Mask": 0.3788, "eval_mar_large": 0.715, "eval_mar_medium": 0.3673, "eval_mar_small": 0.2072, "eval_runtime": 0.9057, "eval_samples_per_second": 32.018, "eval_steps_per_second": 2.208, "step": 3750 }, { "epoch": 30.008, "grad_norm": 27.303627014160156, "learning_rate": 3.8915555555555555e-05, "loss": 1.4316, "step": 3751 }, { "epoch": 30.016, "grad_norm": 26.120878219604492, "learning_rate": 3.8911111111111117e-05, "loss": 1.1807, "step": 3752 }, { "epoch": 30.024, "grad_norm": 34.58171081542969, "learning_rate": 3.890666666666667e-05, "loss": 1.6899, "step": 3753 }, { "epoch": 30.032, "grad_norm": 31.721147537231445, "learning_rate": 3.8902222222222226e-05, "loss": 1.1319, "step": 3754 }, { "epoch": 30.04, "grad_norm": 26.484201431274414, "learning_rate": 3.8897777777777774e-05, "loss": 1.5255, "step": 3755 }, { "epoch": 30.048, "grad_norm": 63.97512435913086, "learning_rate": 3.8893333333333336e-05, "loss": 1.1126, "step": 3756 }, { "epoch": 30.056, "grad_norm": 103.68257141113281, "learning_rate": 3.888888888888889e-05, "loss": 1.5069, "step": 3757 }, { "epoch": 30.064, "grad_norm": 39.904788970947266, "learning_rate": 3.8884444444444446e-05, "loss": 1.2644, "step": 3758 }, { "epoch": 30.072, "grad_norm": 36.92420959472656, "learning_rate": 3.888e-05, "loss": 1.6612, "step": 3759 }, { "epoch": 30.08, "grad_norm": 26.267133712768555, "learning_rate": 3.887555555555556e-05, "loss": 0.9425, "step": 3760 }, { "epoch": 30.088, "grad_norm": 26.15834617614746, "learning_rate": 
3.887111111111112e-05, "loss": 1.6593, "step": 3761 }, { "epoch": 30.096, "grad_norm": 39.4759521484375, "learning_rate": 3.8866666666666665e-05, "loss": 1.5806, "step": 3762 }, { "epoch": 30.104, "grad_norm": 26.321311950683594, "learning_rate": 3.886222222222222e-05, "loss": 1.4411, "step": 3763 }, { "epoch": 30.112, "grad_norm": 19.377779006958008, "learning_rate": 3.885777777777778e-05, "loss": 1.1237, "step": 3764 }, { "epoch": 30.12, "grad_norm": 32.78801345825195, "learning_rate": 3.8853333333333336e-05, "loss": 1.4652, "step": 3765 }, { "epoch": 30.128, "grad_norm": 82.36917114257812, "learning_rate": 3.884888888888889e-05, "loss": 1.3249, "step": 3766 }, { "epoch": 30.136, "grad_norm": 58.340354919433594, "learning_rate": 3.8844444444444446e-05, "loss": 1.6362, "step": 3767 }, { "epoch": 30.144, "grad_norm": 19.10538101196289, "learning_rate": 3.884e-05, "loss": 1.73, "step": 3768 }, { "epoch": 30.152, "grad_norm": 23.58308982849121, "learning_rate": 3.8835555555555556e-05, "loss": 1.1644, "step": 3769 }, { "epoch": 30.16, "grad_norm": 18.90793228149414, "learning_rate": 3.883111111111111e-05, "loss": 1.1013, "step": 3770 }, { "epoch": 30.168, "grad_norm": 21.941783905029297, "learning_rate": 3.8826666666666665e-05, "loss": 0.9753, "step": 3771 }, { "epoch": 30.176, "grad_norm": 22.791200637817383, "learning_rate": 3.882222222222223e-05, "loss": 1.2207, "step": 3772 }, { "epoch": 30.184, "grad_norm": 57.079345703125, "learning_rate": 3.881777777777778e-05, "loss": 1.2728, "step": 3773 }, { "epoch": 30.192, "grad_norm": 18.232582092285156, "learning_rate": 3.881333333333334e-05, "loss": 1.5147, "step": 3774 }, { "epoch": 30.2, "grad_norm": 69.72171020507812, "learning_rate": 3.880888888888889e-05, "loss": 1.2269, "step": 3775 }, { "epoch": 30.208, "grad_norm": 36.12935256958008, "learning_rate": 3.8804444444444446e-05, "loss": 1.2638, "step": 3776 }, { "epoch": 30.216, "grad_norm": 38.17658996582031, "learning_rate": 3.88e-05, "loss": 1.2766, "step": 3777 
}, { "epoch": 30.224, "grad_norm": 41.9587287902832, "learning_rate": 3.8795555555555556e-05, "loss": 1.1452, "step": 3778 }, { "epoch": 30.232, "grad_norm": 30.41119956970215, "learning_rate": 3.879111111111111e-05, "loss": 0.9548, "step": 3779 }, { "epoch": 30.24, "grad_norm": 44.92568588256836, "learning_rate": 3.878666666666667e-05, "loss": 1.2473, "step": 3780 }, { "epoch": 30.248, "grad_norm": 54.610538482666016, "learning_rate": 3.878222222222223e-05, "loss": 1.3874, "step": 3781 }, { "epoch": 30.256, "grad_norm": 31.565567016601562, "learning_rate": 3.877777777777778e-05, "loss": 1.2821, "step": 3782 }, { "epoch": 30.264, "grad_norm": 32.125770568847656, "learning_rate": 3.877333333333334e-05, "loss": 3.4597, "step": 3783 }, { "epoch": 30.272, "grad_norm": 76.27595520019531, "learning_rate": 3.876888888888889e-05, "loss": 1.1437, "step": 3784 }, { "epoch": 30.28, "grad_norm": 36.299869537353516, "learning_rate": 3.876444444444445e-05, "loss": 1.0194, "step": 3785 }, { "epoch": 30.288, "grad_norm": 36.606624603271484, "learning_rate": 3.876e-05, "loss": 1.2998, "step": 3786 }, { "epoch": 30.296, "grad_norm": 34.18950653076172, "learning_rate": 3.8755555555555556e-05, "loss": 1.3671, "step": 3787 }, { "epoch": 30.304, "grad_norm": 42.772640228271484, "learning_rate": 3.875111111111111e-05, "loss": 1.4458, "step": 3788 }, { "epoch": 30.312, "grad_norm": 61.037574768066406, "learning_rate": 3.874666666666667e-05, "loss": 1.3662, "step": 3789 }, { "epoch": 30.32, "grad_norm": 60.30972671508789, "learning_rate": 3.874222222222222e-05, "loss": 1.4704, "step": 3790 }, { "epoch": 30.328, "grad_norm": 18.322851181030273, "learning_rate": 3.8737777777777776e-05, "loss": 0.9749, "step": 3791 }, { "epoch": 30.336, "grad_norm": 147.5380859375, "learning_rate": 3.873333333333333e-05, "loss": 1.5863, "step": 3792 }, { "epoch": 30.344, "grad_norm": 27.780858993530273, "learning_rate": 3.872888888888889e-05, "loss": 2.2986, "step": 3793 }, { "epoch": 30.352, "grad_norm": 
30.68075942993164, "learning_rate": 3.872444444444445e-05, "loss": 1.2414, "step": 3794 }, { "epoch": 30.36, "grad_norm": 27.504182815551758, "learning_rate": 3.872e-05, "loss": 1.1483, "step": 3795 }, { "epoch": 30.368, "grad_norm": 20.669923782348633, "learning_rate": 3.871555555555556e-05, "loss": 1.402, "step": 3796 }, { "epoch": 30.376, "grad_norm": 37.12040328979492, "learning_rate": 3.871111111111111e-05, "loss": 1.5369, "step": 3797 }, { "epoch": 30.384, "grad_norm": 27.664932250976562, "learning_rate": 3.8706666666666667e-05, "loss": 1.1054, "step": 3798 }, { "epoch": 30.392, "grad_norm": 64.52569580078125, "learning_rate": 3.870222222222222e-05, "loss": 1.2104, "step": 3799 }, { "epoch": 30.4, "grad_norm": 111.00208282470703, "learning_rate": 3.8697777777777776e-05, "loss": 1.1775, "step": 3800 }, { "epoch": 30.408, "grad_norm": 21.112565994262695, "learning_rate": 3.869333333333334e-05, "loss": 0.9681, "step": 3801 }, { "epoch": 30.416, "grad_norm": 31.909509658813477, "learning_rate": 3.868888888888889e-05, "loss": 0.9597, "step": 3802 }, { "epoch": 30.424, "grad_norm": 43.89980697631836, "learning_rate": 3.868444444444445e-05, "loss": 1.1073, "step": 3803 }, { "epoch": 30.432, "grad_norm": 35.33338928222656, "learning_rate": 3.868e-05, "loss": 1.0345, "step": 3804 }, { "epoch": 30.44, "grad_norm": 28.674169540405273, "learning_rate": 3.867555555555556e-05, "loss": 1.4351, "step": 3805 }, { "epoch": 30.448, "grad_norm": 16.314743041992188, "learning_rate": 3.867111111111111e-05, "loss": 0.9884, "step": 3806 }, { "epoch": 30.456, "grad_norm": 21.805397033691406, "learning_rate": 3.866666666666667e-05, "loss": 1.1369, "step": 3807 }, { "epoch": 30.464, "grad_norm": 22.167343139648438, "learning_rate": 3.866222222222222e-05, "loss": 1.2111, "step": 3808 }, { "epoch": 30.472, "grad_norm": 37.78203582763672, "learning_rate": 3.865777777777778e-05, "loss": 1.5128, "step": 3809 }, { "epoch": 30.48, "grad_norm": 88.23632049560547, "learning_rate": 
3.865333333333334e-05, "loss": 1.5133, "step": 3810 }, { "epoch": 30.488, "grad_norm": 45.323577880859375, "learning_rate": 3.864888888888889e-05, "loss": 1.3983, "step": 3811 }, { "epoch": 30.496, "grad_norm": 22.75294303894043, "learning_rate": 3.864444444444444e-05, "loss": 1.3103, "step": 3812 }, { "epoch": 30.504, "grad_norm": 15.373635292053223, "learning_rate": 3.864e-05, "loss": 2.2743, "step": 3813 }, { "epoch": 30.512, "grad_norm": 28.066652297973633, "learning_rate": 3.863555555555556e-05, "loss": 1.0524, "step": 3814 }, { "epoch": 30.52, "grad_norm": 42.38616180419922, "learning_rate": 3.863111111111111e-05, "loss": 1.1614, "step": 3815 }, { "epoch": 30.528, "grad_norm": 37.732120513916016, "learning_rate": 3.862666666666667e-05, "loss": 1.5165, "step": 3816 }, { "epoch": 30.536, "grad_norm": 39.360050201416016, "learning_rate": 3.862222222222223e-05, "loss": 1.3886, "step": 3817 }, { "epoch": 30.544, "grad_norm": 34.427024841308594, "learning_rate": 3.8617777777777784e-05, "loss": 1.1053, "step": 3818 }, { "epoch": 30.552, "grad_norm": 22.88673973083496, "learning_rate": 3.861333333333333e-05, "loss": 1.067, "step": 3819 }, { "epoch": 30.56, "grad_norm": 26.946577072143555, "learning_rate": 3.860888888888889e-05, "loss": 1.5037, "step": 3820 }, { "epoch": 30.568, "grad_norm": 21.888412475585938, "learning_rate": 3.860444444444445e-05, "loss": 1.2062, "step": 3821 }, { "epoch": 30.576, "grad_norm": 34.60844802856445, "learning_rate": 3.86e-05, "loss": 1.1503, "step": 3822 }, { "epoch": 30.584, "grad_norm": 33.6983528137207, "learning_rate": 3.859555555555556e-05, "loss": 1.2667, "step": 3823 }, { "epoch": 30.592, "grad_norm": 22.507678985595703, "learning_rate": 3.859111111111111e-05, "loss": 2.236, "step": 3824 }, { "epoch": 30.6, "grad_norm": 53.34096908569336, "learning_rate": 3.858666666666667e-05, "loss": 1.0725, "step": 3825 }, { "epoch": 30.608, "grad_norm": 22.84440803527832, "learning_rate": 3.858222222222222e-05, "loss": 1.2933, "step": 3826 
}, { "epoch": 30.616, "grad_norm": 16.34496307373047, "learning_rate": 3.857777777777778e-05, "loss": 1.0712, "step": 3827 }, { "epoch": 30.624, "grad_norm": 29.05224609375, "learning_rate": 3.857333333333333e-05, "loss": 1.4045, "step": 3828 }, { "epoch": 30.632, "grad_norm": 18.412187576293945, "learning_rate": 3.8568888888888894e-05, "loss": 1.0857, "step": 3829 }, { "epoch": 30.64, "grad_norm": 49.6131477355957, "learning_rate": 3.856444444444445e-05, "loss": 1.9335, "step": 3830 }, { "epoch": 30.648, "grad_norm": 29.146434783935547, "learning_rate": 3.8560000000000004e-05, "loss": 1.0209, "step": 3831 }, { "epoch": 30.656, "grad_norm": 41.83575439453125, "learning_rate": 3.855555555555556e-05, "loss": 1.2344, "step": 3832 }, { "epoch": 30.664, "grad_norm": 32.54252624511719, "learning_rate": 3.855111111111111e-05, "loss": 1.0682, "step": 3833 }, { "epoch": 30.672, "grad_norm": 23.578327178955078, "learning_rate": 3.854666666666667e-05, "loss": 1.3242, "step": 3834 }, { "epoch": 30.68, "grad_norm": 22.4285945892334, "learning_rate": 3.854222222222222e-05, "loss": 1.2039, "step": 3835 }, { "epoch": 30.688, "grad_norm": 15.56036376953125, "learning_rate": 3.853777777777778e-05, "loss": 1.3146, "step": 3836 }, { "epoch": 30.696, "grad_norm": 58.06399917602539, "learning_rate": 3.853333333333334e-05, "loss": 1.0668, "step": 3837 }, { "epoch": 30.704, "grad_norm": 47.327701568603516, "learning_rate": 3.8528888888888894e-05, "loss": 1.3386, "step": 3838 }, { "epoch": 30.712, "grad_norm": 35.34457778930664, "learning_rate": 3.852444444444445e-05, "loss": 1.2121, "step": 3839 }, { "epoch": 30.72, "grad_norm": 35.02259063720703, "learning_rate": 3.8520000000000004e-05, "loss": 1.1628, "step": 3840 }, { "epoch": 30.728, "grad_norm": 28.582191467285156, "learning_rate": 3.851555555555555e-05, "loss": 1.1407, "step": 3841 }, { "epoch": 30.736, "grad_norm": 23.1986026763916, "learning_rate": 3.8511111111111114e-05, "loss": 1.1869, "step": 3842 }, { "epoch": 30.744, 
"grad_norm": 28.87908172607422, "learning_rate": 3.850666666666667e-05, "loss": 1.3172, "step": 3843 }, { "epoch": 30.752, "grad_norm": 20.145891189575195, "learning_rate": 3.850222222222222e-05, "loss": 0.9861, "step": 3844 }, { "epoch": 30.76, "grad_norm": 28.8463134765625, "learning_rate": 3.849777777777778e-05, "loss": 1.8496, "step": 3845 }, { "epoch": 30.768, "grad_norm": 35.279624938964844, "learning_rate": 3.849333333333334e-05, "loss": 1.2927, "step": 3846 }, { "epoch": 30.776, "grad_norm": 36.80277633666992, "learning_rate": 3.848888888888889e-05, "loss": 1.09, "step": 3847 }, { "epoch": 30.784, "grad_norm": 69.88172149658203, "learning_rate": 3.848444444444444e-05, "loss": 1.141, "step": 3848 }, { "epoch": 30.792, "grad_norm": 31.94663429260254, "learning_rate": 3.848e-05, "loss": 1.6491, "step": 3849 }, { "epoch": 30.8, "grad_norm": 38.30421447753906, "learning_rate": 3.847555555555556e-05, "loss": 1.9821, "step": 3850 }, { "epoch": 30.808, "grad_norm": 77.70416259765625, "learning_rate": 3.8471111111111114e-05, "loss": 1.4919, "step": 3851 }, { "epoch": 30.816, "grad_norm": 16.434730529785156, "learning_rate": 3.846666666666667e-05, "loss": 1.0662, "step": 3852 }, { "epoch": 30.824, "grad_norm": 18.756650924682617, "learning_rate": 3.8462222222222224e-05, "loss": 1.4222, "step": 3853 }, { "epoch": 30.832, "grad_norm": 134.865966796875, "learning_rate": 3.845777777777778e-05, "loss": 1.584, "step": 3854 }, { "epoch": 30.84, "grad_norm": 34.342735290527344, "learning_rate": 3.845333333333333e-05, "loss": 1.4355, "step": 3855 }, { "epoch": 30.848, "grad_norm": 27.86723518371582, "learning_rate": 3.844888888888889e-05, "loss": 0.6325, "step": 3856 }, { "epoch": 30.856, "grad_norm": 360.69805908203125, "learning_rate": 3.844444444444444e-05, "loss": 1.4111, "step": 3857 }, { "epoch": 30.864, "grad_norm": 17.534198760986328, "learning_rate": 3.8440000000000005e-05, "loss": 1.2669, "step": 3858 }, { "epoch": 30.872, "grad_norm": 71.74677276611328, 
"learning_rate": 3.843555555555556e-05, "loss": 1.3566, "step": 3859 }, { "epoch": 30.88, "grad_norm": 26.20476531982422, "learning_rate": 3.8431111111111114e-05, "loss": 1.1924, "step": 3860 }, { "epoch": 30.888, "grad_norm": 20.45759391784668, "learning_rate": 3.842666666666667e-05, "loss": 1.3531, "step": 3861 }, { "epoch": 30.896, "grad_norm": 23.51613426208496, "learning_rate": 3.8422222222222224e-05, "loss": 1.0433, "step": 3862 }, { "epoch": 30.904, "grad_norm": 55.14702224731445, "learning_rate": 3.841777777777778e-05, "loss": 1.2813, "step": 3863 }, { "epoch": 30.912, "grad_norm": 61.02687072753906, "learning_rate": 3.8413333333333334e-05, "loss": 1.2039, "step": 3864 }, { "epoch": 30.92, "grad_norm": 19.775571823120117, "learning_rate": 3.840888888888889e-05, "loss": 0.8924, "step": 3865 }, { "epoch": 30.928, "grad_norm": 26.87969970703125, "learning_rate": 3.840444444444445e-05, "loss": 1.1541, "step": 3866 }, { "epoch": 30.936, "grad_norm": 33.231605529785156, "learning_rate": 3.8400000000000005e-05, "loss": 0.8337, "step": 3867 }, { "epoch": 30.944, "grad_norm": 24.230703353881836, "learning_rate": 3.839555555555556e-05, "loss": 1.067, "step": 3868 }, { "epoch": 30.951999999999998, "grad_norm": 37.350074768066406, "learning_rate": 3.839111111111111e-05, "loss": 1.1172, "step": 3869 }, { "epoch": 30.96, "grad_norm": 21.566146850585938, "learning_rate": 3.838666666666667e-05, "loss": 1.2731, "step": 3870 }, { "epoch": 30.968, "grad_norm": 39.26646423339844, "learning_rate": 3.8382222222222224e-05, "loss": 0.9095, "step": 3871 }, { "epoch": 30.976, "grad_norm": 22.34362030029297, "learning_rate": 3.837777777777778e-05, "loss": 1.1119, "step": 3872 }, { "epoch": 30.984, "grad_norm": 53.57741928100586, "learning_rate": 3.8373333333333334e-05, "loss": 1.4729, "step": 3873 }, { "epoch": 30.992, "grad_norm": 21.71986198425293, "learning_rate": 3.8368888888888896e-05, "loss": 0.9888, "step": 3874 }, { "epoch": 31.0, "grad_norm": 29.261180877685547, 
"learning_rate": 3.836444444444445e-05, "loss": 1.3644, "step": 3875 }, { "epoch": 31.0, "eval_loss": 1.3342229127883911, "eval_map": 0.2809, "eval_map_50": 0.6032, "eval_map_75": 0.2046, "eval_map_Coverall": 0.5228, "eval_map_Face_Shield": 0.2877, "eval_map_Gloves": 0.1993, "eval_map_Goggles": 0.1747, "eval_map_Mask": 0.2201, "eval_map_large": 0.3857, "eval_map_medium": 0.2279, "eval_map_small": 0.1595, "eval_mar_1": 0.2694, "eval_mar_10": 0.4628, "eval_mar_100": 0.4834, "eval_mar_100_Coverall": 0.6822, "eval_mar_100_Face_Shield": 0.6059, "eval_mar_100_Gloves": 0.3557, "eval_mar_100_Goggles": 0.4156, "eval_mar_100_Mask": 0.3577, "eval_mar_large": 0.6442, "eval_mar_medium": 0.3839, "eval_mar_small": 0.2369, "eval_runtime": 0.9035, "eval_samples_per_second": 32.096, "eval_steps_per_second": 2.214, "step": 3875 }, { "epoch": 31.008, "grad_norm": 38.899024963378906, "learning_rate": 3.836e-05, "loss": 1.0626, "step": 3876 }, { "epoch": 31.016, "grad_norm": 38.89250946044922, "learning_rate": 3.8355555555555553e-05, "loss": 1.0743, "step": 3877 }, { "epoch": 31.024, "grad_norm": 21.259845733642578, "learning_rate": 3.8351111111111115e-05, "loss": 1.1484, "step": 3878 }, { "epoch": 31.032, "grad_norm": 34.56085968017578, "learning_rate": 3.834666666666667e-05, "loss": 1.4342, "step": 3879 }, { "epoch": 31.04, "grad_norm": 31.429279327392578, "learning_rate": 3.8342222222222225e-05, "loss": 1.0437, "step": 3880 }, { "epoch": 31.048, "grad_norm": 19.275245666503906, "learning_rate": 3.833777777777778e-05, "loss": 1.2673, "step": 3881 }, { "epoch": 31.056, "grad_norm": 109.2490463256836, "learning_rate": 3.8333333333333334e-05, "loss": 1.0153, "step": 3882 }, { "epoch": 31.064, "grad_norm": 27.823314666748047, "learning_rate": 3.832888888888889e-05, "loss": 1.1178, "step": 3883 }, { "epoch": 31.072, "grad_norm": 38.8013916015625, "learning_rate": 3.8324444444444444e-05, "loss": 1.5567, "step": 3884 }, { "epoch": 31.08, "grad_norm": 23.020626068115234, "learning_rate": 
3.832e-05, "loss": 1.6053, "step": 3885 }, { "epoch": 31.088, "grad_norm": 31.885704040527344, "learning_rate": 3.831555555555556e-05, "loss": 1.5991, "step": 3886 }, { "epoch": 31.096, "grad_norm": 33.28619384765625, "learning_rate": 3.8311111111111115e-05, "loss": 0.8998, "step": 3887 }, { "epoch": 31.104, "grad_norm": 37.54951477050781, "learning_rate": 3.830666666666667e-05, "loss": 3.4351, "step": 3888 }, { "epoch": 31.112, "grad_norm": 30.396129608154297, "learning_rate": 3.8302222222222225e-05, "loss": 0.798, "step": 3889 }, { "epoch": 31.12, "grad_norm": 89.8804702758789, "learning_rate": 3.829777777777778e-05, "loss": 1.2419, "step": 3890 }, { "epoch": 31.128, "grad_norm": 40.52051544189453, "learning_rate": 3.8293333333333335e-05, "loss": 1.0584, "step": 3891 }, { "epoch": 31.136, "grad_norm": 27.25218391418457, "learning_rate": 3.828888888888889e-05, "loss": 1.5172, "step": 3892 }, { "epoch": 31.144, "grad_norm": 23.219396591186523, "learning_rate": 3.8284444444444445e-05, "loss": 1.2919, "step": 3893 }, { "epoch": 31.152, "grad_norm": 48.74396896362305, "learning_rate": 3.828e-05, "loss": 1.0603, "step": 3894 }, { "epoch": 31.16, "grad_norm": 32.72584915161133, "learning_rate": 3.827555555555556e-05, "loss": 1.849, "step": 3895 }, { "epoch": 31.168, "grad_norm": 33.19354248046875, "learning_rate": 3.8271111111111116e-05, "loss": 1.1293, "step": 3896 }, { "epoch": 31.176, "grad_norm": 32.06515121459961, "learning_rate": 3.8266666666666664e-05, "loss": 1.2285, "step": 3897 }, { "epoch": 31.184, "grad_norm": 26.98899269104004, "learning_rate": 3.826222222222222e-05, "loss": 1.4328, "step": 3898 }, { "epoch": 31.192, "grad_norm": 20.942550659179688, "learning_rate": 3.825777777777778e-05, "loss": 1.4926, "step": 3899 }, { "epoch": 31.2, "grad_norm": 79.30599212646484, "learning_rate": 3.8253333333333335e-05, "loss": 0.9521, "step": 3900 }, { "epoch": 31.208, "grad_norm": 21.35260009765625, "learning_rate": 3.824888888888889e-05, "loss": 1.1408, "step": 3901 
}, { "epoch": 31.216, "grad_norm": 16.03750228881836, "learning_rate": 3.8244444444444445e-05, "loss": 1.3742, "step": 3902 }, { "epoch": 31.224, "grad_norm": 28.492311477661133, "learning_rate": 3.8240000000000007e-05, "loss": 1.1916, "step": 3903 }, { "epoch": 31.232, "grad_norm": 33.560394287109375, "learning_rate": 3.8235555555555555e-05, "loss": 2.5468, "step": 3904 }, { "epoch": 31.24, "grad_norm": 24.282257080078125, "learning_rate": 3.823111111111111e-05, "loss": 1.0912, "step": 3905 }, { "epoch": 31.248, "grad_norm": 31.81477928161621, "learning_rate": 3.8226666666666664e-05, "loss": 1.182, "step": 3906 }, { "epoch": 31.256, "grad_norm": 58.31856155395508, "learning_rate": 3.8222222222222226e-05, "loss": 1.5493, "step": 3907 }, { "epoch": 31.264, "grad_norm": 23.215354919433594, "learning_rate": 3.821777777777778e-05, "loss": 1.1733, "step": 3908 }, { "epoch": 31.272, "grad_norm": 41.31269454956055, "learning_rate": 3.8213333333333336e-05, "loss": 1.0278, "step": 3909 }, { "epoch": 31.28, "grad_norm": 40.466522216796875, "learning_rate": 3.820888888888889e-05, "loss": 1.5388, "step": 3910 }, { "epoch": 31.288, "grad_norm": 25.337417602539062, "learning_rate": 3.8204444444444445e-05, "loss": 0.9065, "step": 3911 }, { "epoch": 31.296, "grad_norm": 29.943954467773438, "learning_rate": 3.82e-05, "loss": 1.3115, "step": 3912 }, { "epoch": 31.304, "grad_norm": 28.681095123291016, "learning_rate": 3.8195555555555555e-05, "loss": 1.0834, "step": 3913 }, { "epoch": 31.312, "grad_norm": 15.684305191040039, "learning_rate": 3.819111111111111e-05, "loss": 0.9719, "step": 3914 }, { "epoch": 31.32, "grad_norm": 57.991817474365234, "learning_rate": 3.818666666666667e-05, "loss": 0.8374, "step": 3915 }, { "epoch": 31.328, "grad_norm": 42.93978500366211, "learning_rate": 3.8182222222222226e-05, "loss": 1.4548, "step": 3916 }, { "epoch": 31.336, "grad_norm": 74.0536880493164, "learning_rate": 3.817777777777778e-05, "loss": 0.9423, "step": 3917 }, { "epoch": 31.344, 
"grad_norm": 44.27311325073242, "learning_rate": 3.8173333333333336e-05, "loss": 1.2141, "step": 3918 }, { "epoch": 31.352, "grad_norm": 37.442073822021484, "learning_rate": 3.816888888888889e-05, "loss": 1.9254, "step": 3919 }, { "epoch": 31.36, "grad_norm": 25.7818660736084, "learning_rate": 3.8164444444444446e-05, "loss": 1.0354, "step": 3920 }, { "epoch": 31.368, "grad_norm": 26.51028060913086, "learning_rate": 3.816e-05, "loss": 1.3672, "step": 3921 }, { "epoch": 31.376, "grad_norm": 25.299741744995117, "learning_rate": 3.8155555555555555e-05, "loss": 1.1149, "step": 3922 }, { "epoch": 31.384, "grad_norm": 30.069076538085938, "learning_rate": 3.815111111111112e-05, "loss": 1.0504, "step": 3923 }, { "epoch": 31.392, "grad_norm": 38.69300842285156, "learning_rate": 3.814666666666667e-05, "loss": 1.0093, "step": 3924 }, { "epoch": 31.4, "grad_norm": 37.50347137451172, "learning_rate": 3.814222222222223e-05, "loss": 1.3422, "step": 3925 }, { "epoch": 31.408, "grad_norm": 19.80921173095703, "learning_rate": 3.8137777777777775e-05, "loss": 1.0837, "step": 3926 }, { "epoch": 31.416, "grad_norm": 25.278173446655273, "learning_rate": 3.8133333333333336e-05, "loss": 1.211, "step": 3927 }, { "epoch": 31.424, "grad_norm": 14.312692642211914, "learning_rate": 3.812888888888889e-05, "loss": 0.6306, "step": 3928 }, { "epoch": 31.432, "grad_norm": 33.96992111206055, "learning_rate": 3.8124444444444446e-05, "loss": 1.4585, "step": 3929 }, { "epoch": 31.44, "grad_norm": 33.27183151245117, "learning_rate": 3.812e-05, "loss": 1.6691, "step": 3930 }, { "epoch": 31.448, "grad_norm": 78.07662200927734, "learning_rate": 3.811555555555556e-05, "loss": 1.2429, "step": 3931 }, { "epoch": 31.456, "grad_norm": 43.39216232299805, "learning_rate": 3.811111111111112e-05, "loss": 1.0714, "step": 3932 }, { "epoch": 31.464, "grad_norm": 33.1074333190918, "learning_rate": 3.8106666666666665e-05, "loss": 1.0538, "step": 3933 }, { "epoch": 31.472, "grad_norm": 28.753604888916016, "learning_rate": 
3.810222222222222e-05, "loss": 1.2023, "step": 3934 }, { "epoch": 31.48, "grad_norm": 21.186687469482422, "learning_rate": 3.809777777777778e-05, "loss": 0.9091, "step": 3935 }, { "epoch": 31.488, "grad_norm": 30.36610984802246, "learning_rate": 3.809333333333334e-05, "loss": 1.6673, "step": 3936 }, { "epoch": 31.496, "grad_norm": 25.246013641357422, "learning_rate": 3.808888888888889e-05, "loss": 0.8292, "step": 3937 }, { "epoch": 31.504, "grad_norm": 48.31970977783203, "learning_rate": 3.8084444444444446e-05, "loss": 1.1524, "step": 3938 }, { "epoch": 31.512, "grad_norm": 33.18938446044922, "learning_rate": 3.808e-05, "loss": 1.1197, "step": 3939 }, { "epoch": 31.52, "grad_norm": 43.94514465332031, "learning_rate": 3.8075555555555556e-05, "loss": 1.7699, "step": 3940 }, { "epoch": 31.528, "grad_norm": 24.67304801940918, "learning_rate": 3.807111111111111e-05, "loss": 1.0969, "step": 3941 }, { "epoch": 31.536, "grad_norm": 68.58085632324219, "learning_rate": 3.8066666666666666e-05, "loss": 2.3061, "step": 3942 }, { "epoch": 31.544, "grad_norm": 36.32103729248047, "learning_rate": 3.806222222222222e-05, "loss": 1.1413, "step": 3943 }, { "epoch": 31.552, "grad_norm": 27.62924575805664, "learning_rate": 3.805777777777778e-05, "loss": 1.2896, "step": 3944 }, { "epoch": 31.56, "grad_norm": 37.22592544555664, "learning_rate": 3.805333333333334e-05, "loss": 1.4334, "step": 3945 }, { "epoch": 31.568, "grad_norm": 239.87432861328125, "learning_rate": 3.804888888888889e-05, "loss": 1.1502, "step": 3946 }, { "epoch": 31.576, "grad_norm": 20.969818115234375, "learning_rate": 3.804444444444445e-05, "loss": 1.5942, "step": 3947 }, { "epoch": 31.584, "grad_norm": 24.86852264404297, "learning_rate": 3.804e-05, "loss": 0.9599, "step": 3948 }, { "epoch": 31.592, "grad_norm": 55.57275390625, "learning_rate": 3.8035555555555557e-05, "loss": 1.4493, "step": 3949 }, { "epoch": 31.6, "grad_norm": 69.37109375, "learning_rate": 3.803111111111111e-05, "loss": 1.3447, "step": 3950 }, { 
"epoch": 31.608, "grad_norm": 55.66973876953125, "learning_rate": 3.8026666666666666e-05, "loss": 1.0566, "step": 3951 }, { "epoch": 31.616, "grad_norm": 29.58370590209961, "learning_rate": 3.802222222222223e-05, "loss": 1.4359, "step": 3952 }, { "epoch": 31.624, "grad_norm": 46.31754684448242, "learning_rate": 3.801777777777778e-05, "loss": 1.3028, "step": 3953 }, { "epoch": 31.632, "grad_norm": 69.69780731201172, "learning_rate": 3.801333333333333e-05, "loss": 1.076, "step": 3954 }, { "epoch": 31.64, "grad_norm": 88.46127319335938, "learning_rate": 3.8008888888888886e-05, "loss": 0.9124, "step": 3955 }, { "epoch": 31.648, "grad_norm": 327.2952880859375, "learning_rate": 3.800444444444445e-05, "loss": 1.1444, "step": 3956 }, { "epoch": 31.656, "grad_norm": 27.724748611450195, "learning_rate": 3.8e-05, "loss": 1.1494, "step": 3957 }, { "epoch": 31.664, "grad_norm": 29.217714309692383, "learning_rate": 3.799555555555556e-05, "loss": 1.1777, "step": 3958 }, { "epoch": 31.672, "grad_norm": 21.798839569091797, "learning_rate": 3.799111111111111e-05, "loss": 2.4706, "step": 3959 }, { "epoch": 31.68, "grad_norm": 27.01241111755371, "learning_rate": 3.7986666666666673e-05, "loss": 1.335, "step": 3960 }, { "epoch": 31.688, "grad_norm": 28.447818756103516, "learning_rate": 3.798222222222222e-05, "loss": 1.1588, "step": 3961 }, { "epoch": 31.696, "grad_norm": 25.81661605834961, "learning_rate": 3.7977777777777776e-05, "loss": 0.9887, "step": 3962 }, { "epoch": 31.704, "grad_norm": 19.99333381652832, "learning_rate": 3.797333333333333e-05, "loss": 1.136, "step": 3963 }, { "epoch": 31.712, "grad_norm": 13.937270164489746, "learning_rate": 3.796888888888889e-05, "loss": 0.9837, "step": 3964 }, { "epoch": 31.72, "grad_norm": 48.1353874206543, "learning_rate": 3.796444444444445e-05, "loss": 1.0112, "step": 3965 }, { "epoch": 31.728, "grad_norm": 68.02715301513672, "learning_rate": 3.796e-05, "loss": 0.9259, "step": 3966 }, { "epoch": 31.736, "grad_norm": 39.807796478271484, 
"learning_rate": 3.795555555555556e-05, "loss": 1.4832, "step": 3967 }, { "epoch": 31.744, "grad_norm": 36.96114730834961, "learning_rate": 3.795111111111111e-05, "loss": 1.1927, "step": 3968 }, { "epoch": 31.752, "grad_norm": 158.4302215576172, "learning_rate": 3.794666666666667e-05, "loss": 1.6234, "step": 3969 }, { "epoch": 31.76, "grad_norm": 21.552274703979492, "learning_rate": 3.794222222222222e-05, "loss": 1.3335, "step": 3970 }, { "epoch": 31.768, "grad_norm": 45.66322326660156, "learning_rate": 3.793777777777778e-05, "loss": 1.1821, "step": 3971 }, { "epoch": 31.776, "grad_norm": 38.83683776855469, "learning_rate": 3.793333333333334e-05, "loss": 0.9603, "step": 3972 }, { "epoch": 31.784, "grad_norm": 48.39932632446289, "learning_rate": 3.792888888888889e-05, "loss": 1.3285, "step": 3973 }, { "epoch": 31.792, "grad_norm": 30.21106719970703, "learning_rate": 3.792444444444445e-05, "loss": 1.1829, "step": 3974 }, { "epoch": 31.8, "grad_norm": 39.864158630371094, "learning_rate": 3.792e-05, "loss": 1.4352, "step": 3975 }, { "epoch": 31.808, "grad_norm": 272.0443420410156, "learning_rate": 3.791555555555556e-05, "loss": 0.9475, "step": 3976 }, { "epoch": 31.816, "grad_norm": 22.46958351135254, "learning_rate": 3.791111111111111e-05, "loss": 1.1218, "step": 3977 }, { "epoch": 31.824, "grad_norm": 32.051727294921875, "learning_rate": 3.790666666666667e-05, "loss": 1.074, "step": 3978 }, { "epoch": 31.832, "grad_norm": 19.305782318115234, "learning_rate": 3.790222222222222e-05, "loss": 0.8596, "step": 3979 }, { "epoch": 31.84, "grad_norm": 22.237220764160156, "learning_rate": 3.7897777777777784e-05, "loss": 1.102, "step": 3980 }, { "epoch": 31.848, "grad_norm": 47.31459426879883, "learning_rate": 3.789333333333334e-05, "loss": 1.4641, "step": 3981 }, { "epoch": 31.856, "grad_norm": 27.775291442871094, "learning_rate": 3.7888888888888894e-05, "loss": 1.283, "step": 3982 }, { "epoch": 31.864, "grad_norm": 28.62773895263672, "learning_rate": 3.788444444444444e-05, 
"loss": 1.3176, "step": 3983 }, { "epoch": 31.872, "grad_norm": 23.14803123474121, "learning_rate": 3.788e-05, "loss": 0.9635, "step": 3984 }, { "epoch": 31.88, "grad_norm": 31.229995727539062, "learning_rate": 3.787555555555556e-05, "loss": 1.4837, "step": 3985 }, { "epoch": 31.888, "grad_norm": 37.26385498046875, "learning_rate": 3.787111111111111e-05, "loss": 1.0315, "step": 3986 }, { "epoch": 31.896, "grad_norm": 31.69600486755371, "learning_rate": 3.786666666666667e-05, "loss": 1.2931, "step": 3987 }, { "epoch": 31.904, "grad_norm": 51.407833099365234, "learning_rate": 3.786222222222223e-05, "loss": 0.9231, "step": 3988 }, { "epoch": 31.912, "grad_norm": 20.381004333496094, "learning_rate": 3.7857777777777784e-05, "loss": 1.4941, "step": 3989 }, { "epoch": 31.92, "grad_norm": 41.93893814086914, "learning_rate": 3.785333333333333e-05, "loss": 1.0832, "step": 3990 }, { "epoch": 31.928, "grad_norm": 62.00871276855469, "learning_rate": 3.784888888888889e-05, "loss": 1.17, "step": 3991 }, { "epoch": 31.936, "grad_norm": 25.613643646240234, "learning_rate": 3.784444444444445e-05, "loss": 2.5026, "step": 3992 }, { "epoch": 31.944, "grad_norm": 25.865997314453125, "learning_rate": 3.7840000000000004e-05, "loss": 1.0179, "step": 3993 }, { "epoch": 31.951999999999998, "grad_norm": 15.103865623474121, "learning_rate": 3.783555555555556e-05, "loss": 1.33, "step": 3994 }, { "epoch": 31.96, "grad_norm": 35.836456298828125, "learning_rate": 3.783111111111111e-05, "loss": 1.3049, "step": 3995 }, { "epoch": 31.968, "grad_norm": 40.85650634765625, "learning_rate": 3.782666666666667e-05, "loss": 1.2755, "step": 3996 }, { "epoch": 31.976, "grad_norm": 25.535877227783203, "learning_rate": 3.782222222222222e-05, "loss": 0.9433, "step": 3997 }, { "epoch": 31.984, "grad_norm": 36.465755462646484, "learning_rate": 3.781777777777778e-05, "loss": 1.2342, "step": 3998 }, { "epoch": 31.992, "grad_norm": 24.3798770904541, "learning_rate": 3.781333333333333e-05, "loss": 1.0469, "step": 3999 
}, { "epoch": 32.0, "grad_norm": 78.64558410644531, "learning_rate": 3.780888888888889e-05, "loss": 0.7906, "step": 4000 }, { "epoch": 32.0, "eval_loss": 1.2709617614746094, "eval_map": 0.3099, "eval_map_50": 0.6097, "eval_map_75": 0.2785, "eval_map_Coverall": 0.567, "eval_map_Face_Shield": 0.3526, "eval_map_Gloves": 0.219, "eval_map_Goggles": 0.1207, "eval_map_Mask": 0.2903, "eval_map_large": 0.4701, "eval_map_medium": 0.1853, "eval_map_small": 0.1458, "eval_mar_1": 0.2843, "eval_mar_10": 0.4839, "eval_mar_100": 0.5094, "eval_mar_100_Coverall": 0.6911, "eval_mar_100_Face_Shield": 0.6412, "eval_mar_100_Gloves": 0.3672, "eval_mar_100_Goggles": 0.4437, "eval_mar_100_Mask": 0.4038, "eval_mar_large": 0.6968, "eval_mar_medium": 0.3893, "eval_mar_small": 0.1909, "eval_runtime": 0.9122, "eval_samples_per_second": 31.791, "eval_steps_per_second": 2.193, "step": 4000 }, { "epoch": 32.008, "grad_norm": 33.94999313354492, "learning_rate": 3.780444444444445e-05, "loss": 1.2529, "step": 4001 }, { "epoch": 32.016, "grad_norm": 34.3214111328125, "learning_rate": 3.7800000000000004e-05, "loss": 0.878, "step": 4002 }, { "epoch": 32.024, "grad_norm": 22.926359176635742, "learning_rate": 3.779555555555556e-05, "loss": 0.7982, "step": 4003 }, { "epoch": 32.032, "grad_norm": 89.98529815673828, "learning_rate": 3.7791111111111114e-05, "loss": 1.1091, "step": 4004 }, { "epoch": 32.04, "grad_norm": 40.09869384765625, "learning_rate": 3.778666666666667e-05, "loss": 1.4025, "step": 4005 }, { "epoch": 32.048, "grad_norm": 22.1568660736084, "learning_rate": 3.778222222222222e-05, "loss": 1.2884, "step": 4006 }, { "epoch": 32.056, "grad_norm": 41.72019577026367, "learning_rate": 3.777777777777778e-05, "loss": 0.9251, "step": 4007 }, { "epoch": 32.064, "grad_norm": 27.185993194580078, "learning_rate": 3.777333333333333e-05, "loss": 1.5867, "step": 4008 }, { "epoch": 32.072, "grad_norm": 33.6057014465332, "learning_rate": 3.7768888888888895e-05, "loss": 1.2777, "step": 4009 }, { "epoch": 32.08, 
"grad_norm": 27.05630874633789, "learning_rate": 3.776444444444445e-05, "loss": 0.9272, "step": 4010 }, { "epoch": 32.088, "grad_norm": 56.269378662109375, "learning_rate": 3.776e-05, "loss": 0.9477, "step": 4011 }, { "epoch": 32.096, "grad_norm": 92.68749237060547, "learning_rate": 3.775555555555555e-05, "loss": 1.3297, "step": 4012 }, { "epoch": 32.104, "grad_norm": 51.58445739746094, "learning_rate": 3.7751111111111114e-05, "loss": 1.0892, "step": 4013 }, { "epoch": 32.112, "grad_norm": 44.15910720825195, "learning_rate": 3.774666666666667e-05, "loss": 1.7172, "step": 4014 }, { "epoch": 32.12, "grad_norm": 34.049964904785156, "learning_rate": 3.7742222222222224e-05, "loss": 1.0672, "step": 4015 }, { "epoch": 32.128, "grad_norm": 22.05910301208496, "learning_rate": 3.773777777777778e-05, "loss": 0.8572, "step": 4016 }, { "epoch": 32.136, "grad_norm": 21.1417236328125, "learning_rate": 3.773333333333334e-05, "loss": 1.9867, "step": 4017 }, { "epoch": 32.144, "grad_norm": 28.53738021850586, "learning_rate": 3.772888888888889e-05, "loss": 1.0682, "step": 4018 }, { "epoch": 32.152, "grad_norm": 14.937570571899414, "learning_rate": 3.772444444444444e-05, "loss": 1.1931, "step": 4019 }, { "epoch": 32.16, "grad_norm": 32.14681625366211, "learning_rate": 3.772e-05, "loss": 1.0598, "step": 4020 }, { "epoch": 32.168, "grad_norm": 24.612333297729492, "learning_rate": 3.771555555555556e-05, "loss": 1.3289, "step": 4021 }, { "epoch": 32.176, "grad_norm": 27.77199935913086, "learning_rate": 3.7711111111111114e-05, "loss": 0.8673, "step": 4022 }, { "epoch": 32.184, "grad_norm": 29.700674057006836, "learning_rate": 3.770666666666667e-05, "loss": 1.1553, "step": 4023 }, { "epoch": 32.192, "grad_norm": 109.91903686523438, "learning_rate": 3.7702222222222224e-05, "loss": 1.2999, "step": 4024 }, { "epoch": 32.2, "grad_norm": 28.420883178710938, "learning_rate": 3.769777777777778e-05, "loss": 0.974, "step": 4025 }, { "epoch": 32.208, "grad_norm": 27.82767105102539, "learning_rate": 
3.7693333333333334e-05, "loss": 1.4206, "step": 4026 }, { "epoch": 32.216, "grad_norm": 39.37558364868164, "learning_rate": 3.768888888888889e-05, "loss": 1.3717, "step": 4027 }, { "epoch": 32.224, "grad_norm": 25.132734298706055, "learning_rate": 3.7684444444444444e-05, "loss": 1.5124, "step": 4028 }, { "epoch": 32.232, "grad_norm": 27.621780395507812, "learning_rate": 3.7680000000000005e-05, "loss": 1.5439, "step": 4029 }, { "epoch": 32.24, "grad_norm": 96.70732879638672, "learning_rate": 3.767555555555556e-05, "loss": 1.3503, "step": 4030 }, { "epoch": 32.248, "grad_norm": 32.67428207397461, "learning_rate": 3.7671111111111115e-05, "loss": 0.8609, "step": 4031 }, { "epoch": 32.256, "grad_norm": 37.55940628051758, "learning_rate": 3.766666666666667e-05, "loss": 0.9934, "step": 4032 }, { "epoch": 32.264, "grad_norm": 31.95945930480957, "learning_rate": 3.7662222222222225e-05, "loss": 1.6874, "step": 4033 }, { "epoch": 32.272, "grad_norm": 35.70658874511719, "learning_rate": 3.765777777777778e-05, "loss": 0.9677, "step": 4034 }, { "epoch": 32.28, "grad_norm": 28.626272201538086, "learning_rate": 3.7653333333333334e-05, "loss": 1.0445, "step": 4035 }, { "epoch": 32.288, "grad_norm": 31.06674575805664, "learning_rate": 3.764888888888889e-05, "loss": 0.8827, "step": 4036 }, { "epoch": 32.296, "grad_norm": 30.6817626953125, "learning_rate": 3.764444444444445e-05, "loss": 1.2248, "step": 4037 }, { "epoch": 32.304, "grad_norm": 139.7926025390625, "learning_rate": 3.7640000000000006e-05, "loss": 0.9851, "step": 4038 }, { "epoch": 32.312, "grad_norm": 23.735321044921875, "learning_rate": 3.763555555555556e-05, "loss": 1.145, "step": 4039 }, { "epoch": 32.32, "grad_norm": 35.68613815307617, "learning_rate": 3.763111111111111e-05, "loss": 1.4165, "step": 4040 }, { "epoch": 32.328, "grad_norm": 39.58685302734375, "learning_rate": 3.762666666666667e-05, "loss": 0.9459, "step": 4041 }, { "epoch": 32.336, "grad_norm": 28.8321533203125, "learning_rate": 3.7622222222222225e-05, 
"loss": 1.4604, "step": 4042 }, { "epoch": 32.344, "grad_norm": 34.65568161010742, "learning_rate": 3.761777777777778e-05, "loss": 1.1127, "step": 4043 }, { "epoch": 32.352, "grad_norm": 41.131996154785156, "learning_rate": 3.7613333333333335e-05, "loss": 0.9775, "step": 4044 }, { "epoch": 32.36, "grad_norm": 32.462013244628906, "learning_rate": 3.760888888888889e-05, "loss": 1.2948, "step": 4045 }, { "epoch": 32.368, "grad_norm": 41.21154022216797, "learning_rate": 3.760444444444445e-05, "loss": 1.397, "step": 4046 }, { "epoch": 32.376, "grad_norm": 22.890579223632812, "learning_rate": 3.76e-05, "loss": 1.5282, "step": 4047 }, { "epoch": 32.384, "grad_norm": 21.013826370239258, "learning_rate": 3.7595555555555554e-05, "loss": 1.0043, "step": 4048 }, { "epoch": 32.392, "grad_norm": 56.956024169921875, "learning_rate": 3.759111111111111e-05, "loss": 1.5114, "step": 4049 }, { "epoch": 32.4, "grad_norm": 22.312149047851562, "learning_rate": 3.758666666666667e-05, "loss": 0.9811, "step": 4050 }, { "epoch": 32.408, "grad_norm": 19.749319076538086, "learning_rate": 3.7582222222222225e-05, "loss": 1.455, "step": 4051 }, { "epoch": 32.416, "grad_norm": 39.29619216918945, "learning_rate": 3.757777777777778e-05, "loss": 1.248, "step": 4052 }, { "epoch": 32.424, "grad_norm": 23.96172523498535, "learning_rate": 3.7573333333333335e-05, "loss": 0.9877, "step": 4053 }, { "epoch": 32.432, "grad_norm": 48.12984848022461, "learning_rate": 3.756888888888889e-05, "loss": 1.3369, "step": 4054 }, { "epoch": 32.44, "grad_norm": 26.397611618041992, "learning_rate": 3.7564444444444445e-05, "loss": 1.6682, "step": 4055 }, { "epoch": 32.448, "grad_norm": 42.25136947631836, "learning_rate": 3.756e-05, "loss": 1.2255, "step": 4056 }, { "epoch": 32.456, "grad_norm": 21.701597213745117, "learning_rate": 3.7555555555555554e-05, "loss": 1.4693, "step": 4057 }, { "epoch": 32.464, "grad_norm": 33.779396057128906, "learning_rate": 3.7551111111111116e-05, "loss": 1.2665, "step": 4058 }, { "epoch": 
32.472, "grad_norm": 32.729854583740234, "learning_rate": 3.754666666666667e-05, "loss": 1.1628, "step": 4059 }, { "epoch": 32.48, "grad_norm": 48.97102737426758, "learning_rate": 3.7542222222222226e-05, "loss": 1.1389, "step": 4060 }, { "epoch": 32.488, "grad_norm": 23.301973342895508, "learning_rate": 3.753777777777778e-05, "loss": 1.0119, "step": 4061 }, { "epoch": 32.496, "grad_norm": 45.41904067993164, "learning_rate": 3.7533333333333335e-05, "loss": 0.9073, "step": 4062 }, { "epoch": 32.504, "grad_norm": 63.56371307373047, "learning_rate": 3.752888888888889e-05, "loss": 1.3338, "step": 4063 }, { "epoch": 32.512, "grad_norm": 41.861732482910156, "learning_rate": 3.7524444444444445e-05, "loss": 1.0209, "step": 4064 }, { "epoch": 32.52, "grad_norm": 65.65584564208984, "learning_rate": 3.752e-05, "loss": 1.2325, "step": 4065 }, { "epoch": 32.528, "grad_norm": 36.2662239074707, "learning_rate": 3.751555555555556e-05, "loss": 1.3772, "step": 4066 }, { "epoch": 32.536, "grad_norm": 26.9016170501709, "learning_rate": 3.7511111111111116e-05, "loss": 1.1259, "step": 4067 }, { "epoch": 32.544, "grad_norm": 72.47918701171875, "learning_rate": 3.7506666666666664e-05, "loss": 2.1516, "step": 4068 }, { "epoch": 32.552, "grad_norm": 52.382633209228516, "learning_rate": 3.750222222222222e-05, "loss": 1.1077, "step": 4069 }, { "epoch": 32.56, "grad_norm": 29.41329002380371, "learning_rate": 3.749777777777778e-05, "loss": 0.9865, "step": 4070 }, { "epoch": 32.568, "grad_norm": 50.42142868041992, "learning_rate": 3.7493333333333336e-05, "loss": 0.9841, "step": 4071 }, { "epoch": 32.576, "grad_norm": 48.733299255371094, "learning_rate": 3.748888888888889e-05, "loss": 1.0222, "step": 4072 }, { "epoch": 32.584, "grad_norm": 36.06618118286133, "learning_rate": 3.7484444444444445e-05, "loss": 1.4034, "step": 4073 }, { "epoch": 32.592, "grad_norm": 17.77391815185547, "learning_rate": 3.748000000000001e-05, "loss": 0.9285, "step": 4074 }, { "epoch": 32.6, "grad_norm": 
26.55916976928711, "learning_rate": 3.7475555555555555e-05, "loss": 1.1459, "step": 4075 }, { "epoch": 32.608, "grad_norm": 24.74608039855957, "learning_rate": 3.747111111111111e-05, "loss": 1.882, "step": 4076 }, { "epoch": 32.616, "grad_norm": 29.243953704833984, "learning_rate": 3.7466666666666665e-05, "loss": 1.3512, "step": 4077 }, { "epoch": 32.624, "grad_norm": 40.01591491699219, "learning_rate": 3.7462222222222226e-05, "loss": 1.1113, "step": 4078 }, { "epoch": 32.632, "grad_norm": 29.056665420532227, "learning_rate": 3.745777777777778e-05, "loss": 1.3083, "step": 4079 }, { "epoch": 32.64, "grad_norm": 29.444135665893555, "learning_rate": 3.7453333333333336e-05, "loss": 1.0315, "step": 4080 }, { "epoch": 32.648, "grad_norm": 14.98643970489502, "learning_rate": 3.744888888888889e-05, "loss": 1.3552, "step": 4081 }, { "epoch": 32.656, "grad_norm": 151.63217163085938, "learning_rate": 3.7444444444444446e-05, "loss": 1.0971, "step": 4082 }, { "epoch": 32.664, "grad_norm": 23.244417190551758, "learning_rate": 3.744e-05, "loss": 2.3527, "step": 4083 }, { "epoch": 32.672, "grad_norm": 22.667831420898438, "learning_rate": 3.7435555555555556e-05, "loss": 1.1397, "step": 4084 }, { "epoch": 32.68, "grad_norm": 34.976776123046875, "learning_rate": 3.743111111111111e-05, "loss": 1.2818, "step": 4085 }, { "epoch": 32.688, "grad_norm": 28.27040672302246, "learning_rate": 3.742666666666667e-05, "loss": 1.3107, "step": 4086 }, { "epoch": 32.696, "grad_norm": 33.61259078979492, "learning_rate": 3.742222222222223e-05, "loss": 1.485, "step": 4087 }, { "epoch": 32.704, "grad_norm": 22.813873291015625, "learning_rate": 3.741777777777778e-05, "loss": 1.3033, "step": 4088 }, { "epoch": 32.712, "grad_norm": 26.57341194152832, "learning_rate": 3.7413333333333337e-05, "loss": 1.106, "step": 4089 }, { "epoch": 32.72, "grad_norm": 41.94123458862305, "learning_rate": 3.740888888888889e-05, "loss": 1.4775, "step": 4090 }, { "epoch": 32.728, "grad_norm": 32.58701705932617, 
"learning_rate": 3.7404444444444446e-05, "loss": 1.1263, "step": 4091 }, { "epoch": 32.736, "grad_norm": 44.953277587890625, "learning_rate": 3.74e-05, "loss": 1.1913, "step": 4092 }, { "epoch": 32.744, "grad_norm": 27.102651596069336, "learning_rate": 3.7395555555555556e-05, "loss": 1.2421, "step": 4093 }, { "epoch": 32.752, "grad_norm": 17.184385299682617, "learning_rate": 3.739111111111112e-05, "loss": 0.9301, "step": 4094 }, { "epoch": 32.76, "grad_norm": 61.38075256347656, "learning_rate": 3.738666666666667e-05, "loss": 1.734, "step": 4095 }, { "epoch": 32.768, "grad_norm": 19.871007919311523, "learning_rate": 3.738222222222223e-05, "loss": 1.1463, "step": 4096 }, { "epoch": 32.776, "grad_norm": 50.808876037597656, "learning_rate": 3.7377777777777775e-05, "loss": 1.1596, "step": 4097 }, { "epoch": 32.784, "grad_norm": 27.49930763244629, "learning_rate": 3.737333333333333e-05, "loss": 0.9635, "step": 4098 }, { "epoch": 32.792, "grad_norm": 34.27925491333008, "learning_rate": 3.736888888888889e-05, "loss": 1.0502, "step": 4099 }, { "epoch": 32.8, "grad_norm": 34.852760314941406, "learning_rate": 3.7364444444444447e-05, "loss": 1.439, "step": 4100 }, { "epoch": 32.808, "grad_norm": 57.52488327026367, "learning_rate": 3.736e-05, "loss": 0.931, "step": 4101 }, { "epoch": 32.816, "grad_norm": 36.176795959472656, "learning_rate": 3.7355555555555556e-05, "loss": 1.2378, "step": 4102 }, { "epoch": 32.824, "grad_norm": 26.67262077331543, "learning_rate": 3.735111111111111e-05, "loss": 0.9295, "step": 4103 }, { "epoch": 32.832, "grad_norm": 40.15385818481445, "learning_rate": 3.7346666666666666e-05, "loss": 2.1091, "step": 4104 }, { "epoch": 32.84, "grad_norm": 55.20027542114258, "learning_rate": 3.734222222222222e-05, "loss": 1.286, "step": 4105 }, { "epoch": 32.848, "grad_norm": 30.000385284423828, "learning_rate": 3.7337777777777776e-05, "loss": 1.7177, "step": 4106 }, { "epoch": 32.856, "grad_norm": 22.919082641601562, "learning_rate": 3.733333333333334e-05, "loss": 
1.5812, "step": 4107 }, { "epoch": 32.864, "grad_norm": 21.81382942199707, "learning_rate": 3.732888888888889e-05, "loss": 0.9635, "step": 4108 }, { "epoch": 32.872, "grad_norm": 46.24180603027344, "learning_rate": 3.732444444444445e-05, "loss": 1.2264, "step": 4109 }, { "epoch": 32.88, "grad_norm": 110.48451232910156, "learning_rate": 3.732e-05, "loss": 3.2371, "step": 4110 }, { "epoch": 32.888, "grad_norm": 51.34858703613281, "learning_rate": 3.731555555555556e-05, "loss": 1.116, "step": 4111 }, { "epoch": 32.896, "grad_norm": 30.734508514404297, "learning_rate": 3.731111111111111e-05, "loss": 0.9237, "step": 4112 }, { "epoch": 32.904, "grad_norm": 25.77876853942871, "learning_rate": 3.7306666666666666e-05, "loss": 1.1529, "step": 4113 }, { "epoch": 32.912, "grad_norm": 21.224834442138672, "learning_rate": 3.730222222222222e-05, "loss": 1.4525, "step": 4114 }, { "epoch": 32.92, "grad_norm": 21.87966537475586, "learning_rate": 3.729777777777778e-05, "loss": 1.0986, "step": 4115 }, { "epoch": 32.928, "grad_norm": 56.99801254272461, "learning_rate": 3.729333333333334e-05, "loss": 0.8322, "step": 4116 }, { "epoch": 32.936, "grad_norm": 35.624568939208984, "learning_rate": 3.728888888888889e-05, "loss": 1.5592, "step": 4117 }, { "epoch": 32.944, "grad_norm": 24.56744384765625, "learning_rate": 3.728444444444445e-05, "loss": 1.1357, "step": 4118 }, { "epoch": 32.952, "grad_norm": 29.869718551635742, "learning_rate": 3.728e-05, "loss": 0.9989, "step": 4119 }, { "epoch": 32.96, "grad_norm": 15.077817916870117, "learning_rate": 3.727555555555556e-05, "loss": 0.9766, "step": 4120 }, { "epoch": 32.968, "grad_norm": 32.2596549987793, "learning_rate": 3.727111111111111e-05, "loss": 1.4927, "step": 4121 }, { "epoch": 32.976, "grad_norm": 24.030197143554688, "learning_rate": 3.726666666666667e-05, "loss": 1.344, "step": 4122 }, { "epoch": 32.984, "grad_norm": 74.87065887451172, "learning_rate": 3.726222222222223e-05, "loss": 1.1863, "step": 4123 }, { "epoch": 32.992, 
"grad_norm": 50.78618621826172, "learning_rate": 3.725777777777778e-05, "loss": 1.2921, "step": 4124 }, { "epoch": 33.0, "grad_norm": 48.54697036743164, "learning_rate": 3.725333333333333e-05, "loss": 1.2277, "step": 4125 }, { "epoch": 33.0, "eval_loss": 1.2485333681106567, "eval_map": 0.3053, "eval_map_50": 0.6074, "eval_map_75": 0.2555, "eval_map_Coverall": 0.5392, "eval_map_Face_Shield": 0.2574, "eval_map_Gloves": 0.2443, "eval_map_Goggles": 0.1664, "eval_map_Mask": 0.319, "eval_map_large": 0.4663, "eval_map_medium": 0.2652, "eval_map_small": 0.1667, "eval_mar_1": 0.2689, "eval_mar_10": 0.4892, "eval_mar_100": 0.5139, "eval_mar_100_Coverall": 0.7067, "eval_mar_100_Face_Shield": 0.5824, "eval_mar_100_Gloves": 0.4033, "eval_mar_100_Goggles": 0.4656, "eval_mar_100_Mask": 0.4115, "eval_mar_large": 0.6539, "eval_mar_medium": 0.4461, "eval_mar_small": 0.2375, "eval_runtime": 0.9082, "eval_samples_per_second": 31.93, "eval_steps_per_second": 2.202, "step": 4125 }, { "epoch": 33.008, "grad_norm": 27.946744918823242, "learning_rate": 3.7248888888888886e-05, "loss": 1.5799, "step": 4126 }, { "epoch": 33.016, "grad_norm": 65.07792663574219, "learning_rate": 3.724444444444445e-05, "loss": 1.0095, "step": 4127 }, { "epoch": 33.024, "grad_norm": 31.367902755737305, "learning_rate": 3.724e-05, "loss": 1.0632, "step": 4128 }, { "epoch": 33.032, "grad_norm": 31.462244033813477, "learning_rate": 3.723555555555556e-05, "loss": 1.0314, "step": 4129 }, { "epoch": 33.04, "grad_norm": 27.24494171142578, "learning_rate": 3.723111111111111e-05, "loss": 1.5471, "step": 4130 }, { "epoch": 33.048, "grad_norm": 22.754817962646484, "learning_rate": 3.7226666666666674e-05, "loss": 1.2932, "step": 4131 }, { "epoch": 33.056, "grad_norm": 30.628942489624023, "learning_rate": 3.722222222222222e-05, "loss": 2.4922, "step": 4132 }, { "epoch": 33.064, "grad_norm": 45.309852600097656, "learning_rate": 3.721777777777778e-05, "loss": 1.3222, "step": 4133 }, { "epoch": 33.072, "grad_norm": 
12.193672180175781, "learning_rate": 3.721333333333333e-05, "loss": 0.8026, "step": 4134 }, { "epoch": 33.08, "grad_norm": 26.417682647705078, "learning_rate": 3.720888888888889e-05, "loss": 1.314, "step": 4135 }, { "epoch": 33.088, "grad_norm": 36.68531036376953, "learning_rate": 3.720444444444445e-05, "loss": 0.7251, "step": 4136 }, { "epoch": 33.096, "grad_norm": 21.0161190032959, "learning_rate": 3.72e-05, "loss": 1.4021, "step": 4137 }, { "epoch": 33.104, "grad_norm": 28.13772964477539, "learning_rate": 3.719555555555556e-05, "loss": 1.4287, "step": 4138 }, { "epoch": 33.112, "grad_norm": 54.32699966430664, "learning_rate": 3.719111111111111e-05, "loss": 1.5311, "step": 4139 }, { "epoch": 33.12, "grad_norm": 32.68547058105469, "learning_rate": 3.718666666666667e-05, "loss": 1.0823, "step": 4140 }, { "epoch": 33.128, "grad_norm": 30.592281341552734, "learning_rate": 3.718222222222222e-05, "loss": 1.3269, "step": 4141 }, { "epoch": 33.136, "grad_norm": 47.24077224731445, "learning_rate": 3.717777777777778e-05, "loss": 1.2663, "step": 4142 }, { "epoch": 33.144, "grad_norm": 27.772872924804688, "learning_rate": 3.717333333333334e-05, "loss": 1.0131, "step": 4143 }, { "epoch": 33.152, "grad_norm": 35.41189956665039, "learning_rate": 3.7168888888888894e-05, "loss": 1.3381, "step": 4144 }, { "epoch": 33.16, "grad_norm": 32.85588836669922, "learning_rate": 3.716444444444445e-05, "loss": 0.9894, "step": 4145 }, { "epoch": 33.168, "grad_norm": 170.33078002929688, "learning_rate": 3.716e-05, "loss": 1.2139, "step": 4146 }, { "epoch": 33.176, "grad_norm": 52.19281768798828, "learning_rate": 3.715555555555555e-05, "loss": 1.02, "step": 4147 }, { "epoch": 33.184, "grad_norm": 19.79574966430664, "learning_rate": 3.715111111111111e-05, "loss": 1.3498, "step": 4148 }, { "epoch": 33.192, "grad_norm": 25.75760841369629, "learning_rate": 3.714666666666667e-05, "loss": 1.3327, "step": 4149 }, { "epoch": 33.2, "grad_norm": 43.24441146850586, "learning_rate": 3.714222222222222e-05, 
"loss": 1.1866, "step": 4150 }, { "epoch": 33.208, "grad_norm": 32.317317962646484, "learning_rate": 3.713777777777778e-05, "loss": 1.4965, "step": 4151 }, { "epoch": 33.216, "grad_norm": 40.233768463134766, "learning_rate": 3.713333333333334e-05, "loss": 1.374, "step": 4152 }, { "epoch": 33.224, "grad_norm": 39.4413948059082, "learning_rate": 3.7128888888888894e-05, "loss": 0.9031, "step": 4153 }, { "epoch": 33.232, "grad_norm": 21.177562713623047, "learning_rate": 3.712444444444444e-05, "loss": 1.1914, "step": 4154 }, { "epoch": 33.24, "grad_norm": 27.56406593322754, "learning_rate": 3.712e-05, "loss": 1.1471, "step": 4155 }, { "epoch": 33.248, "grad_norm": 24.264516830444336, "learning_rate": 3.711555555555556e-05, "loss": 0.8642, "step": 4156 }, { "epoch": 33.256, "grad_norm": 28.0869083404541, "learning_rate": 3.7111111111111113e-05, "loss": 1.0864, "step": 4157 }, { "epoch": 33.264, "grad_norm": 22.97040557861328, "learning_rate": 3.710666666666667e-05, "loss": 1.0887, "step": 4158 }, { "epoch": 33.272, "grad_norm": 28.04998207092285, "learning_rate": 3.710222222222222e-05, "loss": 1.3391, "step": 4159 }, { "epoch": 33.28, "grad_norm": 26.91746711730957, "learning_rate": 3.709777777777778e-05, "loss": 0.9204, "step": 4160 }, { "epoch": 33.288, "grad_norm": 23.211809158325195, "learning_rate": 3.709333333333333e-05, "loss": 1.219, "step": 4161 }, { "epoch": 33.296, "grad_norm": 19.45076560974121, "learning_rate": 3.708888888888889e-05, "loss": 1.1722, "step": 4162 }, { "epoch": 33.304, "grad_norm": 19.277061462402344, "learning_rate": 3.708444444444444e-05, "loss": 1.2046, "step": 4163 }, { "epoch": 33.312, "grad_norm": 21.052326202392578, "learning_rate": 3.7080000000000004e-05, "loss": 1.3212, "step": 4164 }, { "epoch": 33.32, "grad_norm": 20.033004760742188, "learning_rate": 3.707555555555556e-05, "loss": 1.1572, "step": 4165 }, { "epoch": 33.328, "grad_norm": 60.822593688964844, "learning_rate": 3.7071111111111114e-05, "loss": 1.1855, "step": 4166 }, { 
"epoch": 33.336, "grad_norm": 19.20285415649414, "learning_rate": 3.706666666666667e-05, "loss": 1.2508, "step": 4167 }, { "epoch": 33.344, "grad_norm": 21.7659912109375, "learning_rate": 3.7062222222222224e-05, "loss": 1.0404, "step": 4168 }, { "epoch": 33.352, "grad_norm": 46.9945182800293, "learning_rate": 3.705777777777778e-05, "loss": 1.449, "step": 4169 }, { "epoch": 33.36, "grad_norm": 33.03047561645508, "learning_rate": 3.705333333333333e-05, "loss": 1.4465, "step": 4170 }, { "epoch": 33.368, "grad_norm": 23.595003128051758, "learning_rate": 3.704888888888889e-05, "loss": 1.1635, "step": 4171 }, { "epoch": 33.376, "grad_norm": 37.67658996582031, "learning_rate": 3.704444444444445e-05, "loss": 0.899, "step": 4172 }, { "epoch": 33.384, "grad_norm": 59.089134216308594, "learning_rate": 3.7040000000000005e-05, "loss": 1.6609, "step": 4173 }, { "epoch": 33.392, "grad_norm": 43.19110870361328, "learning_rate": 3.703555555555556e-05, "loss": 1.2945, "step": 4174 }, { "epoch": 33.4, "grad_norm": 49.29085159301758, "learning_rate": 3.7031111111111114e-05, "loss": 1.0835, "step": 4175 }, { "epoch": 33.408, "grad_norm": 32.041778564453125, "learning_rate": 3.702666666666667e-05, "loss": 0.9197, "step": 4176 }, { "epoch": 33.416, "grad_norm": 35.662479400634766, "learning_rate": 3.7022222222222224e-05, "loss": 1.1985, "step": 4177 }, { "epoch": 33.424, "grad_norm": 27.343923568725586, "learning_rate": 3.701777777777778e-05, "loss": 0.9434, "step": 4178 }, { "epoch": 33.432, "grad_norm": 29.43977165222168, "learning_rate": 3.7013333333333334e-05, "loss": 1.1865, "step": 4179 }, { "epoch": 33.44, "grad_norm": 50.275665283203125, "learning_rate": 3.7008888888888895e-05, "loss": 1.5401, "step": 4180 }, { "epoch": 33.448, "grad_norm": 27.629487991333008, "learning_rate": 3.700444444444445e-05, "loss": 2.1405, "step": 4181 }, { "epoch": 33.456, "grad_norm": 31.68697166442871, "learning_rate": 3.7e-05, "loss": 1.0404, "step": 4182 }, { "epoch": 33.464, "grad_norm": 
35.86703872680664, "learning_rate": 3.699555555555555e-05, "loss": 1.0791, "step": 4183 }, { "epoch": 33.472, "grad_norm": 327.2301940917969, "learning_rate": 3.6991111111111115e-05, "loss": 1.3794, "step": 4184 }, { "epoch": 33.48, "grad_norm": 57.87813949584961, "learning_rate": 3.698666666666667e-05, "loss": 1.2666, "step": 4185 }, { "epoch": 33.488, "grad_norm": 29.888002395629883, "learning_rate": 3.6982222222222224e-05, "loss": 1.2416, "step": 4186 }, { "epoch": 33.496, "grad_norm": 24.267192840576172, "learning_rate": 3.697777777777778e-05, "loss": 0.8165, "step": 4187 }, { "epoch": 33.504, "grad_norm": 34.94853210449219, "learning_rate": 3.697333333333334e-05, "loss": 1.2753, "step": 4188 }, { "epoch": 33.512, "grad_norm": 35.50909423828125, "learning_rate": 3.696888888888889e-05, "loss": 1.1235, "step": 4189 }, { "epoch": 33.52, "grad_norm": 18.45343589782715, "learning_rate": 3.6964444444444444e-05, "loss": 0.8467, "step": 4190 }, { "epoch": 33.528, "grad_norm": 27.954387664794922, "learning_rate": 3.696e-05, "loss": 1.169, "step": 4191 }, { "epoch": 33.536, "grad_norm": 28.626195907592773, "learning_rate": 3.695555555555556e-05, "loss": 1.0149, "step": 4192 }, { "epoch": 33.544, "grad_norm": 17.427648544311523, "learning_rate": 3.6951111111111115e-05, "loss": 0.8669, "step": 4193 }, { "epoch": 33.552, "grad_norm": 30.26664924621582, "learning_rate": 3.694666666666667e-05, "loss": 1.0027, "step": 4194 }, { "epoch": 33.56, "grad_norm": 17.016767501831055, "learning_rate": 3.6942222222222225e-05, "loss": 0.9975, "step": 4195 }, { "epoch": 33.568, "grad_norm": 27.31928253173828, "learning_rate": 3.693777777777778e-05, "loss": 1.2463, "step": 4196 }, { "epoch": 33.576, "grad_norm": 23.035303115844727, "learning_rate": 3.6933333333333334e-05, "loss": 1.1405, "step": 4197 }, { "epoch": 33.584, "grad_norm": 21.356225967407227, "learning_rate": 3.692888888888889e-05, "loss": 1.0884, "step": 4198 }, { "epoch": 33.592, "grad_norm": 40.470577239990234, 
"learning_rate": 3.6924444444444444e-05, "loss": 1.2746, "step": 4199 }, { "epoch": 33.6, "grad_norm": 53.85342788696289, "learning_rate": 3.692e-05, "loss": 1.1988, "step": 4200 }, { "epoch": 33.608, "grad_norm": 29.48416519165039, "learning_rate": 3.691555555555556e-05, "loss": 1.3161, "step": 4201 }, { "epoch": 33.616, "grad_norm": 18.986675262451172, "learning_rate": 3.6911111111111115e-05, "loss": 1.1843, "step": 4202 }, { "epoch": 33.624, "grad_norm": 44.22386932373047, "learning_rate": 3.690666666666667e-05, "loss": 1.0455, "step": 4203 }, { "epoch": 33.632, "grad_norm": 70.51152801513672, "learning_rate": 3.690222222222222e-05, "loss": 3.4165, "step": 4204 }, { "epoch": 33.64, "grad_norm": 29.204599380493164, "learning_rate": 3.689777777777778e-05, "loss": 1.3674, "step": 4205 }, { "epoch": 33.648, "grad_norm": 30.922632217407227, "learning_rate": 3.6893333333333335e-05, "loss": 1.5734, "step": 4206 }, { "epoch": 33.656, "grad_norm": 80.82939147949219, "learning_rate": 3.688888888888889e-05, "loss": 1.2735, "step": 4207 }, { "epoch": 33.664, "grad_norm": 48.22208023071289, "learning_rate": 3.6884444444444444e-05, "loss": 1.5285, "step": 4208 }, { "epoch": 33.672, "grad_norm": 143.70028686523438, "learning_rate": 3.6880000000000006e-05, "loss": 1.0814, "step": 4209 }, { "epoch": 33.68, "grad_norm": 17.747228622436523, "learning_rate": 3.687555555555556e-05, "loss": 1.2317, "step": 4210 }, { "epoch": 33.688, "grad_norm": 31.91831398010254, "learning_rate": 3.687111111111111e-05, "loss": 0.9119, "step": 4211 }, { "epoch": 33.696, "grad_norm": 17.528549194335938, "learning_rate": 3.6866666666666664e-05, "loss": 1.0491, "step": 4212 }, { "epoch": 33.704, "grad_norm": 35.28232192993164, "learning_rate": 3.6862222222222225e-05, "loss": 1.2223, "step": 4213 }, { "epoch": 33.712, "grad_norm": 35.28565979003906, "learning_rate": 3.685777777777778e-05, "loss": 1.2032, "step": 4214 }, { "epoch": 33.72, "grad_norm": 33.30052185058594, "learning_rate": 
3.6853333333333335e-05, "loss": 2.2816, "step": 4215 }, { "epoch": 33.728, "grad_norm": 19.60332489013672, "learning_rate": 3.684888888888889e-05, "loss": 1.3328, "step": 4216 }, { "epoch": 33.736, "grad_norm": 40.62601089477539, "learning_rate": 3.6844444444444445e-05, "loss": 1.0549, "step": 4217 }, { "epoch": 33.744, "grad_norm": 154.4104766845703, "learning_rate": 3.684e-05, "loss": 1.1889, "step": 4218 }, { "epoch": 33.752, "grad_norm": 34.64913558959961, "learning_rate": 3.6835555555555554e-05, "loss": 1.2792, "step": 4219 }, { "epoch": 33.76, "grad_norm": 23.447784423828125, "learning_rate": 3.683111111111111e-05, "loss": 1.1446, "step": 4220 }, { "epoch": 33.768, "grad_norm": 30.007902145385742, "learning_rate": 3.682666666666667e-05, "loss": 1.3844, "step": 4221 }, { "epoch": 33.776, "grad_norm": 34.29286193847656, "learning_rate": 3.6822222222222226e-05, "loss": 1.0491, "step": 4222 }, { "epoch": 33.784, "grad_norm": 21.285207748413086, "learning_rate": 3.681777777777778e-05, "loss": 1.2927, "step": 4223 }, { "epoch": 33.792, "grad_norm": 108.81892395019531, "learning_rate": 3.6813333333333335e-05, "loss": 1.5871, "step": 4224 }, { "epoch": 33.8, "grad_norm": 33.16172790527344, "learning_rate": 3.680888888888889e-05, "loss": 0.9902, "step": 4225 }, { "epoch": 33.808, "grad_norm": 19.70855712890625, "learning_rate": 3.6804444444444445e-05, "loss": 0.8029, "step": 4226 }, { "epoch": 33.816, "grad_norm": 36.25909423828125, "learning_rate": 3.68e-05, "loss": 1.833, "step": 4227 }, { "epoch": 33.824, "grad_norm": 26.34271240234375, "learning_rate": 3.6795555555555555e-05, "loss": 0.9195, "step": 4228 }, { "epoch": 33.832, "grad_norm": 29.525678634643555, "learning_rate": 3.6791111111111116e-05, "loss": 1.0344, "step": 4229 }, { "epoch": 33.84, "grad_norm": 25.078784942626953, "learning_rate": 3.678666666666667e-05, "loss": 2.131, "step": 4230 }, { "epoch": 33.848, "grad_norm": 104.8345947265625, "learning_rate": 3.6782222222222226e-05, "loss": 1.3014, "step": 
4231 }, { "epoch": 33.856, "grad_norm": 34.709991455078125, "learning_rate": 3.677777777777778e-05, "loss": 1.429, "step": 4232 }, { "epoch": 33.864, "grad_norm": 16.10024070739746, "learning_rate": 3.6773333333333336e-05, "loss": 1.0634, "step": 4233 }, { "epoch": 33.872, "grad_norm": 52.82359313964844, "learning_rate": 3.676888888888889e-05, "loss": 1.588, "step": 4234 }, { "epoch": 33.88, "grad_norm": 37.35024642944336, "learning_rate": 3.6764444444444446e-05, "loss": 1.1356, "step": 4235 }, { "epoch": 33.888, "grad_norm": 26.567184448242188, "learning_rate": 3.676e-05, "loss": 1.3517, "step": 4236 }, { "epoch": 33.896, "grad_norm": 46.171226501464844, "learning_rate": 3.675555555555556e-05, "loss": 1.5801, "step": 4237 }, { "epoch": 33.904, "grad_norm": 33.45065689086914, "learning_rate": 3.675111111111112e-05, "loss": 1.2095, "step": 4238 }, { "epoch": 33.912, "grad_norm": 26.74775505065918, "learning_rate": 3.6746666666666665e-05, "loss": 1.281, "step": 4239 }, { "epoch": 33.92, "grad_norm": 47.67668914794922, "learning_rate": 3.674222222222222e-05, "loss": 0.9513, "step": 4240 }, { "epoch": 33.928, "grad_norm": 28.249210357666016, "learning_rate": 3.673777777777778e-05, "loss": 1.3022, "step": 4241 }, { "epoch": 33.936, "grad_norm": 14.187994003295898, "learning_rate": 3.6733333333333336e-05, "loss": 0.9295, "step": 4242 }, { "epoch": 33.944, "grad_norm": 40.600563049316406, "learning_rate": 3.672888888888889e-05, "loss": 0.8814, "step": 4243 }, { "epoch": 33.952, "grad_norm": 23.55075454711914, "learning_rate": 3.6724444444444446e-05, "loss": 1.3925, "step": 4244 }, { "epoch": 33.96, "grad_norm": 31.389404296875, "learning_rate": 3.672000000000001e-05, "loss": 1.7627, "step": 4245 }, { "epoch": 33.968, "grad_norm": 31.29239845275879, "learning_rate": 3.6715555555555556e-05, "loss": 1.8633, "step": 4246 }, { "epoch": 33.976, "grad_norm": 35.319175720214844, "learning_rate": 3.671111111111111e-05, "loss": 1.0892, "step": 4247 }, { "epoch": 33.984, 
"grad_norm": 153.52166748046875, "learning_rate": 3.6706666666666665e-05, "loss": 1.4054, "step": 4248 }, { "epoch": 33.992, "grad_norm": 49.025108337402344, "learning_rate": 3.670222222222223e-05, "loss": 1.1381, "step": 4249 }, { "epoch": 34.0, "grad_norm": 25.383106231689453, "learning_rate": 3.669777777777778e-05, "loss": 1.2304, "step": 4250 }, { "epoch": 34.0, "eval_loss": 1.2465307712554932, "eval_map": 0.2934, "eval_map_50": 0.5809, "eval_map_75": 0.2565, "eval_map_Coverall": 0.574, "eval_map_Face_Shield": 0.216, "eval_map_Gloves": 0.2356, "eval_map_Goggles": 0.0764, "eval_map_Mask": 0.3651, "eval_map_large": 0.5186, "eval_map_medium": 0.2349, "eval_map_small": 0.1333, "eval_mar_1": 0.2716, "eval_mar_10": 0.4672, "eval_mar_100": 0.4892, "eval_mar_100_Coverall": 0.7067, "eval_mar_100_Face_Shield": 0.5235, "eval_mar_100_Gloves": 0.3787, "eval_mar_100_Goggles": 0.3719, "eval_mar_100_Mask": 0.4654, "eval_mar_large": 0.657, "eval_mar_medium": 0.4257, "eval_mar_small": 0.2269, "eval_runtime": 0.9199, "eval_samples_per_second": 31.526, "eval_steps_per_second": 2.174, "step": 4250 }, { "epoch": 34.008, "grad_norm": 32.636226654052734, "learning_rate": 3.669333333333334e-05, "loss": 0.9811, "step": 4251 }, { "epoch": 34.016, "grad_norm": 43.735294342041016, "learning_rate": 3.668888888888889e-05, "loss": 1.3168, "step": 4252 }, { "epoch": 34.024, "grad_norm": 20.206619262695312, "learning_rate": 3.6684444444444446e-05, "loss": 1.2229, "step": 4253 }, { "epoch": 34.032, "grad_norm": 33.1250114440918, "learning_rate": 3.668e-05, "loss": 1.3817, "step": 4254 }, { "epoch": 34.04, "grad_norm": 55.48213195800781, "learning_rate": 3.6675555555555556e-05, "loss": 0.8915, "step": 4255 }, { "epoch": 34.048, "grad_norm": 49.97244644165039, "learning_rate": 3.667111111111111e-05, "loss": 1.202, "step": 4256 }, { "epoch": 34.056, "grad_norm": 22.07050895690918, "learning_rate": 3.6666666666666666e-05, "loss": 1.7828, "step": 4257 }, { "epoch": 34.064, "grad_norm": 
19.20821762084961, "learning_rate": 3.666222222222223e-05, "loss": 1.1809, "step": 4258 }, { "epoch": 34.072, "grad_norm": 20.77199935913086, "learning_rate": 3.665777777777778e-05, "loss": 1.3316, "step": 4259 }, { "epoch": 34.08, "grad_norm": 24.472915649414062, "learning_rate": 3.665333333333334e-05, "loss": 1.5281, "step": 4260 }, { "epoch": 34.088, "grad_norm": 38.91160202026367, "learning_rate": 3.6648888888888885e-05, "loss": 1.269, "step": 4261 }, { "epoch": 34.096, "grad_norm": 39.196170806884766, "learning_rate": 3.664444444444445e-05, "loss": 1.2944, "step": 4262 }, { "epoch": 34.104, "grad_norm": 26.15863609313965, "learning_rate": 3.664e-05, "loss": 0.9597, "step": 4263 }, { "epoch": 34.112, "grad_norm": 20.451709747314453, "learning_rate": 3.6635555555555556e-05, "loss": 1.3252, "step": 4264 }, { "epoch": 34.12, "grad_norm": 66.1480712890625, "learning_rate": 3.663111111111111e-05, "loss": 2.0049, "step": 4265 }, { "epoch": 34.128, "grad_norm": 98.37454986572266, "learning_rate": 3.662666666666667e-05, "loss": 1.1171, "step": 4266 }, { "epoch": 34.136, "grad_norm": 37.025882720947266, "learning_rate": 3.662222222222223e-05, "loss": 1.1549, "step": 4267 }, { "epoch": 34.144, "grad_norm": 45.68608093261719, "learning_rate": 3.6617777777777776e-05, "loss": 1.059, "step": 4268 }, { "epoch": 34.152, "grad_norm": 22.56583595275879, "learning_rate": 3.661333333333333e-05, "loss": 1.1341, "step": 4269 }, { "epoch": 34.16, "grad_norm": 17.425485610961914, "learning_rate": 3.660888888888889e-05, "loss": 0.9266, "step": 4270 }, { "epoch": 34.168, "grad_norm": 31.069395065307617, "learning_rate": 3.660444444444445e-05, "loss": 1.0678, "step": 4271 }, { "epoch": 34.176, "grad_norm": 29.437862396240234, "learning_rate": 3.66e-05, "loss": 0.9909, "step": 4272 }, { "epoch": 34.184, "grad_norm": 21.52241325378418, "learning_rate": 3.659555555555556e-05, "loss": 1.5265, "step": 4273 }, { "epoch": 34.192, "grad_norm": 21.594303131103516, "learning_rate": 
3.659111111111111e-05, "loss": 1.2529, "step": 4274 }, { "epoch": 34.2, "grad_norm": 31.460338592529297, "learning_rate": 3.6586666666666666e-05, "loss": 1.3948, "step": 4275 }, { "epoch": 34.208, "grad_norm": 156.20277404785156, "learning_rate": 3.658222222222222e-05, "loss": 1.1752, "step": 4276 }, { "epoch": 34.216, "grad_norm": 24.80820655822754, "learning_rate": 3.6577777777777776e-05, "loss": 1.0609, "step": 4277 }, { "epoch": 34.224, "grad_norm": 34.205108642578125, "learning_rate": 3.657333333333334e-05, "loss": 1.2456, "step": 4278 }, { "epoch": 34.232, "grad_norm": 18.59238052368164, "learning_rate": 3.656888888888889e-05, "loss": 1.0406, "step": 4279 }, { "epoch": 34.24, "grad_norm": 25.149232864379883, "learning_rate": 3.656444444444445e-05, "loss": 1.5467, "step": 4280 }, { "epoch": 34.248, "grad_norm": 22.79172134399414, "learning_rate": 3.656e-05, "loss": 1.1777, "step": 4281 }, { "epoch": 34.256, "grad_norm": 24.366376876831055, "learning_rate": 3.655555555555556e-05, "loss": 1.1056, "step": 4282 }, { "epoch": 34.264, "grad_norm": 33.93423843383789, "learning_rate": 3.655111111111111e-05, "loss": 1.2662, "step": 4283 }, { "epoch": 34.272, "grad_norm": 24.89275360107422, "learning_rate": 3.654666666666667e-05, "loss": 0.9156, "step": 4284 }, { "epoch": 34.28, "grad_norm": 21.432125091552734, "learning_rate": 3.654222222222222e-05, "loss": 1.333, "step": 4285 }, { "epoch": 34.288, "grad_norm": 18.26622200012207, "learning_rate": 3.653777777777778e-05, "loss": 1.0314, "step": 4286 }, { "epoch": 34.296, "grad_norm": 28.65655517578125, "learning_rate": 3.653333333333334e-05, "loss": 1.0324, "step": 4287 }, { "epoch": 34.304, "grad_norm": 621.365478515625, "learning_rate": 3.652888888888889e-05, "loss": 1.0418, "step": 4288 }, { "epoch": 34.312, "grad_norm": 27.07204818725586, "learning_rate": 3.652444444444444e-05, "loss": 1.1997, "step": 4289 }, { "epoch": 34.32, "grad_norm": 23.70124626159668, "learning_rate": 3.652e-05, "loss": 0.8666, "step": 4290 }, 
{ "epoch": 34.328, "grad_norm": 30.05571746826172, "learning_rate": 3.651555555555556e-05, "loss": 1.084, "step": 4291 }, { "epoch": 34.336, "grad_norm": 53.240234375, "learning_rate": 3.651111111111111e-05, "loss": 1.1549, "step": 4292 }, { "epoch": 34.344, "grad_norm": 24.076406478881836, "learning_rate": 3.650666666666667e-05, "loss": 1.7499, "step": 4293 }, { "epoch": 34.352, "grad_norm": 34.65673065185547, "learning_rate": 3.650222222222223e-05, "loss": 1.1862, "step": 4294 }, { "epoch": 34.36, "grad_norm": 129.15782165527344, "learning_rate": 3.6497777777777784e-05, "loss": 1.6093, "step": 4295 }, { "epoch": 34.368, "grad_norm": 19.220489501953125, "learning_rate": 3.649333333333333e-05, "loss": 1.4258, "step": 4296 }, { "epoch": 34.376, "grad_norm": 20.416385650634766, "learning_rate": 3.648888888888889e-05, "loss": 1.2248, "step": 4297 }, { "epoch": 34.384, "grad_norm": 20.037403106689453, "learning_rate": 3.648444444444445e-05, "loss": 1.2348, "step": 4298 }, { "epoch": 34.392, "grad_norm": 20.823577880859375, "learning_rate": 3.648e-05, "loss": 1.3253, "step": 4299 }, { "epoch": 34.4, "grad_norm": 24.7029972076416, "learning_rate": 3.647555555555556e-05, "loss": 1.091, "step": 4300 }, { "epoch": 34.408, "grad_norm": 30.605926513671875, "learning_rate": 3.647111111111111e-05, "loss": 0.9173, "step": 4301 }, { "epoch": 34.416, "grad_norm": 35.423912048339844, "learning_rate": 3.646666666666667e-05, "loss": 1.2293, "step": 4302 }, { "epoch": 34.424, "grad_norm": 281.9477844238281, "learning_rate": 3.646222222222222e-05, "loss": 1.4488, "step": 4303 }, { "epoch": 34.432, "grad_norm": 75.09217834472656, "learning_rate": 3.645777777777778e-05, "loss": 1.2375, "step": 4304 }, { "epoch": 34.44, "grad_norm": 76.0108413696289, "learning_rate": 3.645333333333333e-05, "loss": 0.8652, "step": 4305 }, { "epoch": 34.448, "grad_norm": 19.441539764404297, "learning_rate": 3.644888888888889e-05, "loss": 1.3378, "step": 4306 }, { "epoch": 34.456, "grad_norm": 
17.290037155151367, "learning_rate": 3.644444444444445e-05, "loss": 1.1461, "step": 4307 }, { "epoch": 34.464, "grad_norm": 20.769590377807617, "learning_rate": 3.6440000000000003e-05, "loss": 0.9858, "step": 4308 }, { "epoch": 34.472, "grad_norm": 26.142621994018555, "learning_rate": 3.643555555555556e-05, "loss": 1.153, "step": 4309 }, { "epoch": 34.48, "grad_norm": 17.553119659423828, "learning_rate": 3.643111111111111e-05, "loss": 1.1127, "step": 4310 }, { "epoch": 34.488, "grad_norm": 22.783658981323242, "learning_rate": 3.642666666666667e-05, "loss": 0.9483, "step": 4311 }, { "epoch": 34.496, "grad_norm": 32.167442321777344, "learning_rate": 3.642222222222222e-05, "loss": 1.4798, "step": 4312 }, { "epoch": 34.504, "grad_norm": 29.060152053833008, "learning_rate": 3.641777777777778e-05, "loss": 0.9433, "step": 4313 }, { "epoch": 34.512, "grad_norm": 24.91141128540039, "learning_rate": 3.641333333333333e-05, "loss": 1.5626, "step": 4314 }, { "epoch": 34.52, "grad_norm": 79.30438232421875, "learning_rate": 3.6408888888888894e-05, "loss": 0.95, "step": 4315 }, { "epoch": 34.528, "grad_norm": 43.2298583984375, "learning_rate": 3.640444444444445e-05, "loss": 0.9927, "step": 4316 }, { "epoch": 34.536, "grad_norm": 24.135028839111328, "learning_rate": 3.6400000000000004e-05, "loss": 1.4695, "step": 4317 }, { "epoch": 34.544, "grad_norm": 26.21227264404297, "learning_rate": 3.639555555555555e-05, "loss": 1.2626, "step": 4318 }, { "epoch": 34.552, "grad_norm": 31.403255462646484, "learning_rate": 3.6391111111111114e-05, "loss": 1.1831, "step": 4319 }, { "epoch": 34.56, "grad_norm": 22.604219436645508, "learning_rate": 3.638666666666667e-05, "loss": 1.431, "step": 4320 }, { "epoch": 34.568, "grad_norm": 22.42060089111328, "learning_rate": 3.638222222222222e-05, "loss": 0.855, "step": 4321 }, { "epoch": 34.576, "grad_norm": 32.28057861328125, "learning_rate": 3.637777777777778e-05, "loss": 1.2652, "step": 4322 }, { "epoch": 34.584, "grad_norm": 27.196460723876953, 
"learning_rate": 3.637333333333334e-05, "loss": 1.2249, "step": 4323 }, { "epoch": 34.592, "grad_norm": 14.886704444885254, "learning_rate": 3.6368888888888895e-05, "loss": 1.051, "step": 4324 }, { "epoch": 34.6, "grad_norm": 23.887338638305664, "learning_rate": 3.636444444444444e-05, "loss": 1.205, "step": 4325 }, { "epoch": 34.608, "grad_norm": 56.00489044189453, "learning_rate": 3.636e-05, "loss": 1.059, "step": 4326 }, { "epoch": 34.616, "grad_norm": 16.506988525390625, "learning_rate": 3.635555555555556e-05, "loss": 1.0828, "step": 4327 }, { "epoch": 34.624, "grad_norm": 20.03325653076172, "learning_rate": 3.6351111111111114e-05, "loss": 1.1923, "step": 4328 }, { "epoch": 34.632, "grad_norm": 85.27617645263672, "learning_rate": 3.634666666666667e-05, "loss": 1.8344, "step": 4329 }, { "epoch": 34.64, "grad_norm": 29.39356803894043, "learning_rate": 3.6342222222222224e-05, "loss": 1.1304, "step": 4330 }, { "epoch": 34.648, "grad_norm": 20.321565628051758, "learning_rate": 3.633777777777778e-05, "loss": 1.9918, "step": 4331 }, { "epoch": 34.656, "grad_norm": 27.52280044555664, "learning_rate": 3.633333333333333e-05, "loss": 1.3236, "step": 4332 }, { "epoch": 34.664, "grad_norm": 24.203765869140625, "learning_rate": 3.632888888888889e-05, "loss": 1.677, "step": 4333 }, { "epoch": 34.672, "grad_norm": 33.081871032714844, "learning_rate": 3.632444444444444e-05, "loss": 1.0049, "step": 4334 }, { "epoch": 34.68, "grad_norm": 41.97941207885742, "learning_rate": 3.6320000000000005e-05, "loss": 0.7996, "step": 4335 }, { "epoch": 34.688, "grad_norm": 21.639612197875977, "learning_rate": 3.631555555555556e-05, "loss": 1.1838, "step": 4336 }, { "epoch": 34.696, "grad_norm": 33.69153594970703, "learning_rate": 3.6311111111111114e-05, "loss": 0.9826, "step": 4337 }, { "epoch": 34.704, "grad_norm": 50.08519744873047, "learning_rate": 3.630666666666667e-05, "loss": 1.2281, "step": 4338 }, { "epoch": 34.712, "grad_norm": 34.40840148925781, "learning_rate": 
3.6302222222222224e-05, "loss": 1.4634, "step": 4339 }, { "epoch": 34.72, "grad_norm": 73.29840850830078, "learning_rate": 3.629777777777778e-05, "loss": 0.9013, "step": 4340 }, { "epoch": 34.728, "grad_norm": 35.81261444091797, "learning_rate": 3.6293333333333334e-05, "loss": 1.3963, "step": 4341 }, { "epoch": 34.736, "grad_norm": 53.95508575439453, "learning_rate": 3.628888888888889e-05, "loss": 3.4532, "step": 4342 }, { "epoch": 34.744, "grad_norm": 68.73031616210938, "learning_rate": 3.628444444444445e-05, "loss": 0.9, "step": 4343 }, { "epoch": 34.752, "grad_norm": 98.7029037475586, "learning_rate": 3.6280000000000005e-05, "loss": 1.0447, "step": 4344 }, { "epoch": 34.76, "grad_norm": 27.57685661315918, "learning_rate": 3.627555555555556e-05, "loss": 1.16, "step": 4345 }, { "epoch": 34.768, "grad_norm": 25.653472900390625, "learning_rate": 3.627111111111111e-05, "loss": 0.8371, "step": 4346 }, { "epoch": 34.776, "grad_norm": 170.45860290527344, "learning_rate": 3.626666666666667e-05, "loss": 1.3153, "step": 4347 }, { "epoch": 34.784, "grad_norm": 28.956817626953125, "learning_rate": 3.6262222222222224e-05, "loss": 1.3511, "step": 4348 }, { "epoch": 34.792, "grad_norm": 47.57479476928711, "learning_rate": 3.625777777777778e-05, "loss": 0.9912, "step": 4349 }, { "epoch": 34.8, "grad_norm": 38.551239013671875, "learning_rate": 3.6253333333333334e-05, "loss": 1.3793, "step": 4350 }, { "epoch": 34.808, "grad_norm": 20.9029598236084, "learning_rate": 3.6248888888888896e-05, "loss": 1.2575, "step": 4351 }, { "epoch": 34.816, "grad_norm": 26.290685653686523, "learning_rate": 3.624444444444445e-05, "loss": 0.9347, "step": 4352 }, { "epoch": 34.824, "grad_norm": 33.13890075683594, "learning_rate": 3.624e-05, "loss": 1.1716, "step": 4353 }, { "epoch": 34.832, "grad_norm": 26.355337142944336, "learning_rate": 3.6235555555555553e-05, "loss": 2.5221, "step": 4354 }, { "epoch": 34.84, "grad_norm": 94.43643188476562, "learning_rate": 3.623111111111111e-05, "loss": 1.2365, 
"step": 4355 }, { "epoch": 34.848, "grad_norm": 30.96413803100586, "learning_rate": 3.622666666666667e-05, "loss": 2.2289, "step": 4356 }, { "epoch": 34.856, "grad_norm": 162.9969940185547, "learning_rate": 3.6222222222222225e-05, "loss": 1.0056, "step": 4357 }, { "epoch": 34.864, "grad_norm": 48.81777572631836, "learning_rate": 3.621777777777778e-05, "loss": 1.4658, "step": 4358 }, { "epoch": 34.872, "grad_norm": 33.833160400390625, "learning_rate": 3.6213333333333334e-05, "loss": 1.0333, "step": 4359 }, { "epoch": 34.88, "grad_norm": 85.19332885742188, "learning_rate": 3.620888888888889e-05, "loss": 1.5854, "step": 4360 }, { "epoch": 34.888, "grad_norm": 153.47720336914062, "learning_rate": 3.6204444444444444e-05, "loss": 0.8302, "step": 4361 }, { "epoch": 34.896, "grad_norm": 22.31074333190918, "learning_rate": 3.62e-05, "loss": 1.1739, "step": 4362 }, { "epoch": 34.904, "grad_norm": 20.99762535095215, "learning_rate": 3.6195555555555554e-05, "loss": 1.6053, "step": 4363 }, { "epoch": 34.912, "grad_norm": 31.73782730102539, "learning_rate": 3.6191111111111115e-05, "loss": 1.1905, "step": 4364 }, { "epoch": 34.92, "grad_norm": 95.57732391357422, "learning_rate": 3.618666666666667e-05, "loss": 1.8444, "step": 4365 }, { "epoch": 34.928, "grad_norm": 30.90903091430664, "learning_rate": 3.6182222222222225e-05, "loss": 1.2421, "step": 4366 }, { "epoch": 34.936, "grad_norm": 47.101959228515625, "learning_rate": 3.617777777777778e-05, "loss": 1.285, "step": 4367 }, { "epoch": 34.944, "grad_norm": 48.30484390258789, "learning_rate": 3.6173333333333335e-05, "loss": 1.2921, "step": 4368 }, { "epoch": 34.952, "grad_norm": 32.50209426879883, "learning_rate": 3.616888888888889e-05, "loss": 0.7204, "step": 4369 }, { "epoch": 34.96, "grad_norm": 17.575700759887695, "learning_rate": 3.6164444444444445e-05, "loss": 0.9996, "step": 4370 }, { "epoch": 34.968, "grad_norm": 19.439701080322266, "learning_rate": 3.616e-05, "loss": 0.8611, "step": 4371 }, { "epoch": 34.976, "grad_norm": 
38.76561737060547, "learning_rate": 3.615555555555556e-05, "loss": 1.1994, "step": 4372 }, { "epoch": 34.984, "grad_norm": 27.295976638793945, "learning_rate": 3.6151111111111116e-05, "loss": 1.5159, "step": 4373 }, { "epoch": 34.992, "grad_norm": 32.99669647216797, "learning_rate": 3.614666666666667e-05, "loss": 1.0256, "step": 4374 }, { "epoch": 35.0, "grad_norm": 28.46596336364746, "learning_rate": 3.614222222222222e-05, "loss": 1.12, "step": 4375 }, { "epoch": 35.0, "eval_loss": 1.1882301568984985, "eval_map": 0.3275, "eval_map_50": 0.6632, "eval_map_75": 0.2963, "eval_map_Coverall": 0.5242, "eval_map_Face_Shield": 0.3655, "eval_map_Gloves": 0.2323, "eval_map_Goggles": 0.1347, "eval_map_Mask": 0.3807, "eval_map_large": 0.5005, "eval_map_medium": 0.2555, "eval_map_small": 0.1495, "eval_mar_1": 0.2872, "eval_mar_10": 0.4957, "eval_mar_100": 0.5051, "eval_mar_100_Coverall": 0.6733, "eval_mar_100_Face_Shield": 0.5882, "eval_mar_100_Gloves": 0.3869, "eval_mar_100_Goggles": 0.4, "eval_mar_100_Mask": 0.4769, "eval_mar_large": 0.6388, "eval_mar_medium": 0.4323, "eval_mar_small": 0.2286, "eval_runtime": 0.9121, "eval_samples_per_second": 31.794, "eval_steps_per_second": 2.193, "step": 4375 }, { "epoch": 35.008, "grad_norm": 31.453813552856445, "learning_rate": 3.613777777777778e-05, "loss": 1.2164, "step": 4376 }, { "epoch": 35.016, "grad_norm": 31.516033172607422, "learning_rate": 3.6133333333333335e-05, "loss": 0.9649, "step": 4377 }, { "epoch": 35.024, "grad_norm": 23.197349548339844, "learning_rate": 3.612888888888889e-05, "loss": 1.1338, "step": 4378 }, { "epoch": 35.032, "grad_norm": 24.228931427001953, "learning_rate": 3.6124444444444445e-05, "loss": 1.0783, "step": 4379 }, { "epoch": 35.04, "grad_norm": 35.79087829589844, "learning_rate": 3.6120000000000007e-05, "loss": 1.3927, "step": 4380 }, { "epoch": 35.048, "grad_norm": 39.53689956665039, "learning_rate": 3.611555555555556e-05, "loss": 1.0769, "step": 4381 }, { "epoch": 35.056, "grad_norm": 
25.23543930053711, "learning_rate": 3.611111111111111e-05, "loss": 1.1405, "step": 4382 }, { "epoch": 35.064, "grad_norm": 21.579265594482422, "learning_rate": 3.6106666666666664e-05, "loss": 1.1348, "step": 4383 }, { "epoch": 35.072, "grad_norm": 77.87621307373047, "learning_rate": 3.6102222222222226e-05, "loss": 1.7584, "step": 4384 }, { "epoch": 35.08, "grad_norm": 21.48822593688965, "learning_rate": 3.609777777777778e-05, "loss": 1.4846, "step": 4385 }, { "epoch": 35.088, "grad_norm": 26.626068115234375, "learning_rate": 3.6093333333333336e-05, "loss": 1.0648, "step": 4386 }, { "epoch": 35.096, "grad_norm": 16.28133201599121, "learning_rate": 3.608888888888889e-05, "loss": 1.2074, "step": 4387 }, { "epoch": 35.104, "grad_norm": 26.014713287353516, "learning_rate": 3.6084444444444445e-05, "loss": 1.051, "step": 4388 }, { "epoch": 35.112, "grad_norm": 21.398427963256836, "learning_rate": 3.608e-05, "loss": 1.1998, "step": 4389 }, { "epoch": 35.12, "grad_norm": 22.293495178222656, "learning_rate": 3.6075555555555555e-05, "loss": 1.2423, "step": 4390 }, { "epoch": 35.128, "grad_norm": 31.543807983398438, "learning_rate": 3.607111111111111e-05, "loss": 1.6027, "step": 4391 }, { "epoch": 35.136, "grad_norm": 22.90938949584961, "learning_rate": 3.606666666666667e-05, "loss": 1.2153, "step": 4392 }, { "epoch": 35.144, "grad_norm": 91.53954315185547, "learning_rate": 3.6062222222222226e-05, "loss": 1.4595, "step": 4393 }, { "epoch": 35.152, "grad_norm": 35.14376449584961, "learning_rate": 3.605777777777778e-05, "loss": 1.3237, "step": 4394 }, { "epoch": 35.16, "grad_norm": 58.73846435546875, "learning_rate": 3.6053333333333336e-05, "loss": 0.888, "step": 4395 }, { "epoch": 35.168, "grad_norm": 28.38874626159668, "learning_rate": 3.604888888888889e-05, "loss": 2.5681, "step": 4396 }, { "epoch": 35.176, "grad_norm": 54.142967224121094, "learning_rate": 3.6044444444444446e-05, "loss": 1.2806, "step": 4397 }, { "epoch": 35.184, "grad_norm": 36.359703063964844, 
"learning_rate": 3.604e-05, "loss": 1.3766, "step": 4398 }, { "epoch": 35.192, "grad_norm": 20.768781661987305, "learning_rate": 3.6035555555555555e-05, "loss": 0.6931, "step": 4399 }, { "epoch": 35.2, "grad_norm": 42.05064392089844, "learning_rate": 3.603111111111112e-05, "loss": 1.1634, "step": 4400 }, { "epoch": 35.208, "grad_norm": 19.598583221435547, "learning_rate": 3.602666666666667e-05, "loss": 1.479, "step": 4401 }, { "epoch": 35.216, "grad_norm": 17.133211135864258, "learning_rate": 3.602222222222223e-05, "loss": 1.0643, "step": 4402 }, { "epoch": 35.224, "grad_norm": 28.3166446685791, "learning_rate": 3.6017777777777775e-05, "loss": 1.2002, "step": 4403 }, { "epoch": 35.232, "grad_norm": 37.675350189208984, "learning_rate": 3.6013333333333336e-05, "loss": 1.2043, "step": 4404 }, { "epoch": 35.24, "grad_norm": 39.63308334350586, "learning_rate": 3.600888888888889e-05, "loss": 1.2326, "step": 4405 }, { "epoch": 35.248, "grad_norm": 48.93975067138672, "learning_rate": 3.6004444444444446e-05, "loss": 1.5488, "step": 4406 }, { "epoch": 35.256, "grad_norm": 33.73335647583008, "learning_rate": 3.6e-05, "loss": 3.1366, "step": 4407 }, { "epoch": 35.264, "grad_norm": 35.42158889770508, "learning_rate": 3.5995555555555556e-05, "loss": 1.2497, "step": 4408 }, { "epoch": 35.272, "grad_norm": 65.28055572509766, "learning_rate": 3.599111111111112e-05, "loss": 1.2253, "step": 4409 }, { "epoch": 35.28, "grad_norm": 19.987409591674805, "learning_rate": 3.5986666666666665e-05, "loss": 1.0993, "step": 4410 }, { "epoch": 35.288, "grad_norm": 46.91200637817383, "learning_rate": 3.598222222222222e-05, "loss": 1.875, "step": 4411 }, { "epoch": 35.296, "grad_norm": 20.43638801574707, "learning_rate": 3.5977777777777775e-05, "loss": 1.4444, "step": 4412 }, { "epoch": 35.304, "grad_norm": 29.610273361206055, "learning_rate": 3.597333333333334e-05, "loss": 1.1976, "step": 4413 }, { "epoch": 35.312, "grad_norm": 44.666839599609375, "learning_rate": 3.596888888888889e-05, "loss": 
1.0074, "step": 4414 }, { "epoch": 35.32, "grad_norm": 40.60602569580078, "learning_rate": 3.5964444444444446e-05, "loss": 1.2156, "step": 4415 }, { "epoch": 35.328, "grad_norm": 20.16448974609375, "learning_rate": 3.596e-05, "loss": 1.075, "step": 4416 }, { "epoch": 35.336, "grad_norm": 25.587568283081055, "learning_rate": 3.5955555555555556e-05, "loss": 0.9387, "step": 4417 }, { "epoch": 35.344, "grad_norm": 26.13882064819336, "learning_rate": 3.595111111111111e-05, "loss": 0.9367, "step": 4418 }, { "epoch": 35.352, "grad_norm": 24.0405330657959, "learning_rate": 3.5946666666666666e-05, "loss": 1.0652, "step": 4419 }, { "epoch": 35.36, "grad_norm": 22.108413696289062, "learning_rate": 3.594222222222222e-05, "loss": 0.9659, "step": 4420 }, { "epoch": 35.368, "grad_norm": 43.184757232666016, "learning_rate": 3.593777777777778e-05, "loss": 1.1947, "step": 4421 }, { "epoch": 35.376, "grad_norm": 40.329124450683594, "learning_rate": 3.593333333333334e-05, "loss": 1.7035, "step": 4422 }, { "epoch": 35.384, "grad_norm": 60.00286102294922, "learning_rate": 3.592888888888889e-05, "loss": 0.836, "step": 4423 }, { "epoch": 35.392, "grad_norm": 28.964515686035156, "learning_rate": 3.592444444444445e-05, "loss": 1.0595, "step": 4424 }, { "epoch": 35.4, "grad_norm": 29.172531127929688, "learning_rate": 3.592e-05, "loss": 0.8508, "step": 4425 }, { "epoch": 35.408, "grad_norm": 28.207683563232422, "learning_rate": 3.5915555555555557e-05, "loss": 1.0819, "step": 4426 }, { "epoch": 35.416, "grad_norm": 62.303558349609375, "learning_rate": 3.591111111111111e-05, "loss": 0.949, "step": 4427 }, { "epoch": 35.424, "grad_norm": 39.123836517333984, "learning_rate": 3.5906666666666666e-05, "loss": 0.7951, "step": 4428 }, { "epoch": 35.432, "grad_norm": 16.138774871826172, "learning_rate": 3.590222222222223e-05, "loss": 1.1256, "step": 4429 }, { "epoch": 35.44, "grad_norm": 22.048974990844727, "learning_rate": 3.589777777777778e-05, "loss": 1.1024, "step": 4430 }, { "epoch": 35.448, 
"grad_norm": 27.573177337646484, "learning_rate": 3.589333333333334e-05, "loss": 1.1546, "step": 4431 }, { "epoch": 35.456, "grad_norm": 18.534242630004883, "learning_rate": 3.5888888888888886e-05, "loss": 1.2903, "step": 4432 }, { "epoch": 35.464, "grad_norm": 36.911537170410156, "learning_rate": 3.588444444444445e-05, "loss": 1.0356, "step": 4433 }, { "epoch": 35.472, "grad_norm": 18.241710662841797, "learning_rate": 3.588e-05, "loss": 0.9547, "step": 4434 }, { "epoch": 35.48, "grad_norm": 112.02188873291016, "learning_rate": 3.587555555555556e-05, "loss": 1.4572, "step": 4435 }, { "epoch": 35.488, "grad_norm": 43.100399017333984, "learning_rate": 3.587111111111111e-05, "loss": 0.9947, "step": 4436 }, { "epoch": 35.496, "grad_norm": 20.19296646118164, "learning_rate": 3.586666666666667e-05, "loss": 0.9092, "step": 4437 }, { "epoch": 35.504, "grad_norm": 31.379281997680664, "learning_rate": 3.586222222222223e-05, "loss": 1.222, "step": 4438 }, { "epoch": 35.512, "grad_norm": 28.97528076171875, "learning_rate": 3.5857777777777776e-05, "loss": 1.1842, "step": 4439 }, { "epoch": 35.52, "grad_norm": 28.81976890563965, "learning_rate": 3.585333333333333e-05, "loss": 2.2135, "step": 4440 }, { "epoch": 35.528, "grad_norm": 29.232982635498047, "learning_rate": 3.584888888888889e-05, "loss": 1.0943, "step": 4441 }, { "epoch": 35.536, "grad_norm": 32.340843200683594, "learning_rate": 3.584444444444445e-05, "loss": 1.226, "step": 4442 }, { "epoch": 35.544, "grad_norm": 22.333534240722656, "learning_rate": 3.584e-05, "loss": 0.9633, "step": 4443 }, { "epoch": 35.552, "grad_norm": 41.86981964111328, "learning_rate": 3.583555555555556e-05, "loss": 1.1254, "step": 4444 }, { "epoch": 35.56, "grad_norm": 128.7765655517578, "learning_rate": 3.583111111111111e-05, "loss": 1.0853, "step": 4445 }, { "epoch": 35.568, "grad_norm": 51.92301559448242, "learning_rate": 3.582666666666667e-05, "loss": 1.1704, "step": 4446 }, { "epoch": 35.576, "grad_norm": 47.13789749145508, "learning_rate": 
3.582222222222222e-05, "loss": 0.9708, "step": 4447 }, { "epoch": 35.584, "grad_norm": 27.768901824951172, "learning_rate": 3.581777777777778e-05, "loss": 2.5014, "step": 4448 }, { "epoch": 35.592, "grad_norm": 35.67829513549805, "learning_rate": 3.581333333333334e-05, "loss": 1.2267, "step": 4449 }, { "epoch": 35.6, "grad_norm": 25.30986976623535, "learning_rate": 3.580888888888889e-05, "loss": 1.3456, "step": 4450 }, { "epoch": 35.608, "grad_norm": 40.68380355834961, "learning_rate": 3.580444444444445e-05, "loss": 1.3091, "step": 4451 }, { "epoch": 35.616, "grad_norm": 19.809188842773438, "learning_rate": 3.58e-05, "loss": 1.4825, "step": 4452 }, { "epoch": 35.624, "grad_norm": 26.047147750854492, "learning_rate": 3.579555555555556e-05, "loss": 1.4192, "step": 4453 }, { "epoch": 35.632, "grad_norm": 121.25592041015625, "learning_rate": 3.579111111111111e-05, "loss": 1.839, "step": 4454 }, { "epoch": 35.64, "grad_norm": 29.025848388671875, "learning_rate": 3.578666666666667e-05, "loss": 0.9945, "step": 4455 }, { "epoch": 35.648, "grad_norm": 29.58808135986328, "learning_rate": 3.578222222222222e-05, "loss": 0.824, "step": 4456 }, { "epoch": 35.656, "grad_norm": 257.0087585449219, "learning_rate": 3.577777777777778e-05, "loss": 1.1163, "step": 4457 }, { "epoch": 35.664, "grad_norm": 108.50547790527344, "learning_rate": 3.577333333333334e-05, "loss": 1.1763, "step": 4458 }, { "epoch": 35.672, "grad_norm": 22.887737274169922, "learning_rate": 3.5768888888888894e-05, "loss": 1.1137, "step": 4459 }, { "epoch": 35.68, "grad_norm": 19.14449691772461, "learning_rate": 3.576444444444444e-05, "loss": 1.0484, "step": 4460 }, { "epoch": 35.688, "grad_norm": 33.822715759277344, "learning_rate": 3.5759999999999996e-05, "loss": 0.8394, "step": 4461 }, { "epoch": 35.696, "grad_norm": 27.713258743286133, "learning_rate": 3.575555555555556e-05, "loss": 1.4852, "step": 4462 }, { "epoch": 35.704, "grad_norm": 40.587886810302734, "learning_rate": 3.575111111111111e-05, "loss": 1.2845, 
"step": 4463 }, { "epoch": 35.712, "grad_norm": 23.45922088623047, "learning_rate": 3.574666666666667e-05, "loss": 1.0091, "step": 4464 }, { "epoch": 35.72, "grad_norm": 30.420860290527344, "learning_rate": 3.574222222222222e-05, "loss": 1.3535, "step": 4465 }, { "epoch": 35.728, "grad_norm": 22.564680099487305, "learning_rate": 3.5737777777777784e-05, "loss": 0.9979, "step": 4466 }, { "epoch": 35.736, "grad_norm": 31.06162452697754, "learning_rate": 3.573333333333333e-05, "loss": 1.2189, "step": 4467 }, { "epoch": 35.744, "grad_norm": 16.618017196655273, "learning_rate": 3.572888888888889e-05, "loss": 1.0104, "step": 4468 }, { "epoch": 35.752, "grad_norm": 30.99930763244629, "learning_rate": 3.572444444444444e-05, "loss": 0.7596, "step": 4469 }, { "epoch": 35.76, "grad_norm": 35.118526458740234, "learning_rate": 3.5720000000000004e-05, "loss": 1.0062, "step": 4470 }, { "epoch": 35.768, "grad_norm": 29.69176483154297, "learning_rate": 3.571555555555556e-05, "loss": 0.9408, "step": 4471 }, { "epoch": 35.776, "grad_norm": 43.287845611572266, "learning_rate": 3.571111111111111e-05, "loss": 1.221, "step": 4472 }, { "epoch": 35.784, "grad_norm": 18.91431427001953, "learning_rate": 3.570666666666667e-05, "loss": 1.1368, "step": 4473 }, { "epoch": 35.792, "grad_norm": 97.25041961669922, "learning_rate": 3.570222222222222e-05, "loss": 1.3511, "step": 4474 }, { "epoch": 35.8, "grad_norm": 31.532878875732422, "learning_rate": 3.569777777777778e-05, "loss": 1.9852, "step": 4475 }, { "epoch": 35.808, "grad_norm": 34.60083770751953, "learning_rate": 3.569333333333333e-05, "loss": 1.228, "step": 4476 }, { "epoch": 35.816, "grad_norm": 19.522558212280273, "learning_rate": 3.568888888888889e-05, "loss": 1.1403, "step": 4477 }, { "epoch": 35.824, "grad_norm": 36.766082763671875, "learning_rate": 3.568444444444445e-05, "loss": 0.9783, "step": 4478 }, { "epoch": 35.832, "grad_norm": 27.18716812133789, "learning_rate": 3.5680000000000004e-05, "loss": 1.7349, "step": 4479 }, { "epoch": 
35.84, "grad_norm": 165.19482421875, "learning_rate": 3.567555555555556e-05, "loss": 1.1626, "step": 4480 }, { "epoch": 35.848, "grad_norm": 37.353363037109375, "learning_rate": 3.5671111111111114e-05, "loss": 1.5134, "step": 4481 }, { "epoch": 35.856, "grad_norm": 50.68967056274414, "learning_rate": 3.566666666666667e-05, "loss": 1.2102, "step": 4482 }, { "epoch": 35.864, "grad_norm": 34.78771209716797, "learning_rate": 3.566222222222222e-05, "loss": 1.346, "step": 4483 }, { "epoch": 35.872, "grad_norm": 82.42778778076172, "learning_rate": 3.565777777777778e-05, "loss": 1.0225, "step": 4484 }, { "epoch": 35.88, "grad_norm": 23.932275772094727, "learning_rate": 3.565333333333333e-05, "loss": 1.0265, "step": 4485 }, { "epoch": 35.888, "grad_norm": 89.91390991210938, "learning_rate": 3.5648888888888895e-05, "loss": 1.1738, "step": 4486 }, { "epoch": 35.896, "grad_norm": 19.517810821533203, "learning_rate": 3.564444444444445e-05, "loss": 0.9465, "step": 4487 }, { "epoch": 35.904, "grad_norm": 22.32008171081543, "learning_rate": 3.5640000000000004e-05, "loss": 0.9264, "step": 4488 }, { "epoch": 35.912, "grad_norm": 30.67711067199707, "learning_rate": 3.563555555555555e-05, "loss": 1.0169, "step": 4489 }, { "epoch": 35.92, "grad_norm": 31.112049102783203, "learning_rate": 3.5631111111111114e-05, "loss": 1.1972, "step": 4490 }, { "epoch": 35.928, "grad_norm": 30.870363235473633, "learning_rate": 3.562666666666667e-05, "loss": 1.0616, "step": 4491 }, { "epoch": 35.936, "grad_norm": 23.225244522094727, "learning_rate": 3.5622222222222224e-05, "loss": 1.3658, "step": 4492 }, { "epoch": 35.944, "grad_norm": 19.52179718017578, "learning_rate": 3.561777777777778e-05, "loss": 1.4617, "step": 4493 }, { "epoch": 35.952, "grad_norm": 34.74064636230469, "learning_rate": 3.561333333333334e-05, "loss": 1.3396, "step": 4494 }, { "epoch": 35.96, "grad_norm": 26.48849868774414, "learning_rate": 3.560888888888889e-05, "loss": 1.0627, "step": 4495 }, { "epoch": 35.968, "grad_norm": 
27.35248374938965, "learning_rate": 3.560444444444444e-05, "loss": 0.9673, "step": 4496 }, { "epoch": 35.976, "grad_norm": 39.258724212646484, "learning_rate": 3.56e-05, "loss": 1.3993, "step": 4497 }, { "epoch": 35.984, "grad_norm": 23.74018096923828, "learning_rate": 3.559555555555556e-05, "loss": 1.3812, "step": 4498 }, { "epoch": 35.992, "grad_norm": 29.178226470947266, "learning_rate": 3.5591111111111114e-05, "loss": 1.0401, "step": 4499 }, { "epoch": 36.0, "grad_norm": 27.70631217956543, "learning_rate": 3.558666666666667e-05, "loss": 1.1094, "step": 4500 }, { "epoch": 36.0, "eval_loss": 1.2110183238983154, "eval_map": 0.3282, "eval_map_50": 0.6505, "eval_map_75": 0.284, "eval_map_Coverall": 0.5215, "eval_map_Face_Shield": 0.357, "eval_map_Gloves": 0.2521, "eval_map_Goggles": 0.1701, "eval_map_Mask": 0.3404, "eval_map_large": 0.5039, "eval_map_medium": 0.2166, "eval_map_small": 0.2435, "eval_mar_1": 0.2889, "eval_mar_10": 0.5267, "eval_mar_100": 0.5425, "eval_mar_100_Coverall": 0.6689, "eval_mar_100_Face_Shield": 0.6765, "eval_mar_100_Gloves": 0.4082, "eval_mar_100_Goggles": 0.4938, "eval_mar_100_Mask": 0.4654, "eval_mar_large": 0.721, "eval_mar_medium": 0.4131, "eval_mar_small": 0.3482, "eval_runtime": 0.929, "eval_samples_per_second": 31.217, "eval_steps_per_second": 2.153, "step": 4500 }, { "epoch": 36.008, "grad_norm": 22.438077926635742, "learning_rate": 3.5582222222222224e-05, "loss": 1.2212, "step": 4501 }, { "epoch": 36.016, "grad_norm": 26.273706436157227, "learning_rate": 3.557777777777778e-05, "loss": 1.1981, "step": 4502 }, { "epoch": 36.024, "grad_norm": 23.138551712036133, "learning_rate": 3.5573333333333334e-05, "loss": 0.9154, "step": 4503 }, { "epoch": 36.032, "grad_norm": 25.820255279541016, "learning_rate": 3.556888888888889e-05, "loss": 0.9446, "step": 4504 }, { "epoch": 36.04, "grad_norm": 29.342845916748047, "learning_rate": 3.5564444444444444e-05, "loss": 1.197, "step": 4505 }, { "epoch": 36.048, "grad_norm": 37.77339172363281, 
"learning_rate": 3.5560000000000005e-05, "loss": 1.4504, "step": 4506 }, { "epoch": 36.056, "grad_norm": 33.121620178222656, "learning_rate": 3.555555555555556e-05, "loss": 0.8414, "step": 4507 }, { "epoch": 36.064, "grad_norm": 68.50117492675781, "learning_rate": 3.5551111111111115e-05, "loss": 1.3227, "step": 4508 }, { "epoch": 36.072, "grad_norm": 31.342926025390625, "learning_rate": 3.554666666666667e-05, "loss": 1.5207, "step": 4509 }, { "epoch": 36.08, "grad_norm": 45.650779724121094, "learning_rate": 3.5542222222222225e-05, "loss": 1.4381, "step": 4510 }, { "epoch": 36.088, "grad_norm": 37.147804260253906, "learning_rate": 3.553777777777778e-05, "loss": 1.3238, "step": 4511 }, { "epoch": 36.096, "grad_norm": 14.546359062194824, "learning_rate": 3.5533333333333334e-05, "loss": 0.8643, "step": 4512 }, { "epoch": 36.104, "grad_norm": 37.64516830444336, "learning_rate": 3.552888888888889e-05, "loss": 0.9451, "step": 4513 }, { "epoch": 36.112, "grad_norm": 28.11725425720215, "learning_rate": 3.5524444444444444e-05, "loss": 2.1438, "step": 4514 }, { "epoch": 36.12, "grad_norm": 21.614933013916016, "learning_rate": 3.5520000000000006e-05, "loss": 1.1018, "step": 4515 }, { "epoch": 36.128, "grad_norm": 18.466581344604492, "learning_rate": 3.551555555555556e-05, "loss": 1.051, "step": 4516 }, { "epoch": 36.136, "grad_norm": 31.435312271118164, "learning_rate": 3.551111111111111e-05, "loss": 1.7193, "step": 4517 }, { "epoch": 36.144, "grad_norm": 34.50068664550781, "learning_rate": 3.550666666666666e-05, "loss": 1.452, "step": 4518 }, { "epoch": 36.152, "grad_norm": 69.10759735107422, "learning_rate": 3.5502222222222225e-05, "loss": 1.4167, "step": 4519 }, { "epoch": 36.16, "grad_norm": 34.87308883666992, "learning_rate": 3.549777777777778e-05, "loss": 1.407, "step": 4520 }, { "epoch": 36.168, "grad_norm": 26.10576820373535, "learning_rate": 3.5493333333333335e-05, "loss": 0.7943, "step": 4521 }, { "epoch": 36.176, "grad_norm": 29.84587860107422, "learning_rate": 
3.548888888888889e-05, "loss": 0.5888, "step": 4522 }, { "epoch": 36.184, "grad_norm": 30.327878952026367, "learning_rate": 3.548444444444445e-05, "loss": 1.0606, "step": 4523 }, { "epoch": 36.192, "grad_norm": 31.977642059326172, "learning_rate": 3.548e-05, "loss": 1.6179, "step": 4524 }, { "epoch": 36.2, "grad_norm": 42.52082443237305, "learning_rate": 3.5475555555555554e-05, "loss": 1.6366, "step": 4525 }, { "epoch": 36.208, "grad_norm": 123.6769027709961, "learning_rate": 3.547111111111111e-05, "loss": 1.09, "step": 4526 }, { "epoch": 36.216, "grad_norm": 18.224401473999023, "learning_rate": 3.546666666666667e-05, "loss": 1.201, "step": 4527 }, { "epoch": 36.224, "grad_norm": 35.415706634521484, "learning_rate": 3.5462222222222225e-05, "loss": 2.3447, "step": 4528 }, { "epoch": 36.232, "grad_norm": 21.546024322509766, "learning_rate": 3.545777777777778e-05, "loss": 0.9219, "step": 4529 }, { "epoch": 36.24, "grad_norm": 40.86906433105469, "learning_rate": 3.5453333333333335e-05, "loss": 0.8952, "step": 4530 }, { "epoch": 36.248, "grad_norm": 19.991575241088867, "learning_rate": 3.544888888888889e-05, "loss": 1.1193, "step": 4531 }, { "epoch": 36.256, "grad_norm": 43.17687225341797, "learning_rate": 3.5444444444444445e-05, "loss": 1.2237, "step": 4532 }, { "epoch": 36.264, "grad_norm": 26.82200050354004, "learning_rate": 3.544e-05, "loss": 2.0389, "step": 4533 }, { "epoch": 36.272, "grad_norm": 62.18055725097656, "learning_rate": 3.5435555555555554e-05, "loss": 1.438, "step": 4534 }, { "epoch": 36.28, "grad_norm": 37.80363464355469, "learning_rate": 3.5431111111111116e-05, "loss": 1.0511, "step": 4535 }, { "epoch": 36.288, "grad_norm": 20.851186752319336, "learning_rate": 3.542666666666667e-05, "loss": 1.1862, "step": 4536 }, { "epoch": 36.296, "grad_norm": 38.63617706298828, "learning_rate": 3.5422222222222226e-05, "loss": 1.3068, "step": 4537 }, { "epoch": 36.304, "grad_norm": 36.892547607421875, "learning_rate": 3.541777777777778e-05, "loss": 1.0559, "step": 
4538 }, { "epoch": 36.312, "grad_norm": 19.445392608642578, "learning_rate": 3.5413333333333335e-05, "loss": 1.3054, "step": 4539 }, { "epoch": 36.32, "grad_norm": 28.222190856933594, "learning_rate": 3.540888888888889e-05, "loss": 1.0969, "step": 4540 }, { "epoch": 36.328, "grad_norm": 68.27243041992188, "learning_rate": 3.5404444444444445e-05, "loss": 1.6352, "step": 4541 }, { "epoch": 36.336, "grad_norm": 25.305927276611328, "learning_rate": 3.54e-05, "loss": 1.0701, "step": 4542 }, { "epoch": 36.344, "grad_norm": 17.00455093383789, "learning_rate": 3.539555555555556e-05, "loss": 1.1962, "step": 4543 }, { "epoch": 36.352, "grad_norm": 33.39599609375, "learning_rate": 3.5391111111111116e-05, "loss": 1.2315, "step": 4544 }, { "epoch": 36.36, "grad_norm": 20.2470703125, "learning_rate": 3.538666666666667e-05, "loss": 1.2948, "step": 4545 }, { "epoch": 36.368, "grad_norm": 27.29989242553711, "learning_rate": 3.538222222222222e-05, "loss": 1.175, "step": 4546 }, { "epoch": 36.376, "grad_norm": 30.19513702392578, "learning_rate": 3.537777777777778e-05, "loss": 0.9492, "step": 4547 }, { "epoch": 36.384, "grad_norm": 25.208972930908203, "learning_rate": 3.5373333333333336e-05, "loss": 1.1421, "step": 4548 }, { "epoch": 36.392, "grad_norm": 31.988100051879883, "learning_rate": 3.536888888888889e-05, "loss": 3.033, "step": 4549 }, { "epoch": 36.4, "grad_norm": 29.954465866088867, "learning_rate": 3.5364444444444445e-05, "loss": 0.8862, "step": 4550 }, { "epoch": 36.408, "grad_norm": 71.26689147949219, "learning_rate": 3.536000000000001e-05, "loss": 1.4885, "step": 4551 }, { "epoch": 36.416, "grad_norm": 43.164039611816406, "learning_rate": 3.5355555555555555e-05, "loss": 0.714, "step": 4552 }, { "epoch": 36.424, "grad_norm": 23.37006378173828, "learning_rate": 3.535111111111111e-05, "loss": 1.4968, "step": 4553 }, { "epoch": 36.432, "grad_norm": 47.165283203125, "learning_rate": 3.5346666666666665e-05, "loss": 1.006, "step": 4554 }, { "epoch": 36.44, "grad_norm": 
24.55790138244629, "learning_rate": 3.5342222222222226e-05, "loss": 1.1078, "step": 4555 }, { "epoch": 36.448, "grad_norm": 38.51036834716797, "learning_rate": 3.533777777777778e-05, "loss": 1.5917, "step": 4556 }, { "epoch": 36.456, "grad_norm": 16.785566329956055, "learning_rate": 3.5333333333333336e-05, "loss": 0.8612, "step": 4557 }, { "epoch": 36.464, "grad_norm": 123.10285949707031, "learning_rate": 3.532888888888889e-05, "loss": 1.3615, "step": 4558 }, { "epoch": 36.472, "grad_norm": 28.37275505065918, "learning_rate": 3.5324444444444446e-05, "loss": 1.4499, "step": 4559 }, { "epoch": 36.48, "grad_norm": 48.385929107666016, "learning_rate": 3.532e-05, "loss": 1.7213, "step": 4560 }, { "epoch": 36.488, "grad_norm": 28.610576629638672, "learning_rate": 3.5315555555555555e-05, "loss": 1.8646, "step": 4561 }, { "epoch": 36.496, "grad_norm": 38.696414947509766, "learning_rate": 3.531111111111111e-05, "loss": 1.2456, "step": 4562 }, { "epoch": 36.504, "grad_norm": 19.402809143066406, "learning_rate": 3.5306666666666665e-05, "loss": 1.1883, "step": 4563 }, { "epoch": 36.512, "grad_norm": 20.503629684448242, "learning_rate": 3.530222222222223e-05, "loss": 1.0099, "step": 4564 }, { "epoch": 36.52, "grad_norm": 16.21599578857422, "learning_rate": 3.529777777777778e-05, "loss": 1.0967, "step": 4565 }, { "epoch": 36.528, "grad_norm": 22.527130126953125, "learning_rate": 3.5293333333333336e-05, "loss": 1.1378, "step": 4566 }, { "epoch": 36.536, "grad_norm": 23.54560661315918, "learning_rate": 3.528888888888889e-05, "loss": 1.3101, "step": 4567 }, { "epoch": 36.544, "grad_norm": 24.47981071472168, "learning_rate": 3.5284444444444446e-05, "loss": 1.4685, "step": 4568 }, { "epoch": 36.552, "grad_norm": 18.49004554748535, "learning_rate": 3.528e-05, "loss": 1.3093, "step": 4569 }, { "epoch": 36.56, "grad_norm": 26.300281524658203, "learning_rate": 3.5275555555555556e-05, "loss": 1.4106, "step": 4570 }, { "epoch": 36.568, "grad_norm": 20.067392349243164, "learning_rate": 
3.527111111111111e-05, "loss": 1.0995, "step": 4571 }, { "epoch": 36.576, "grad_norm": 31.317495346069336, "learning_rate": 3.526666666666667e-05, "loss": 1.2248, "step": 4572 }, { "epoch": 36.584, "grad_norm": 31.094806671142578, "learning_rate": 3.526222222222223e-05, "loss": 0.749, "step": 4573 }, { "epoch": 36.592, "grad_norm": 181.1944580078125, "learning_rate": 3.5257777777777775e-05, "loss": 1.317, "step": 4574 }, { "epoch": 36.6, "grad_norm": 20.702030181884766, "learning_rate": 3.525333333333333e-05, "loss": 1.3967, "step": 4575 }, { "epoch": 36.608, "grad_norm": 37.56708526611328, "learning_rate": 3.524888888888889e-05, "loss": 1.3138, "step": 4576 }, { "epoch": 36.616, "grad_norm": 12.939963340759277, "learning_rate": 3.5244444444444447e-05, "loss": 0.9025, "step": 4577 }, { "epoch": 36.624, "grad_norm": 32.58252716064453, "learning_rate": 3.524e-05, "loss": 1.0806, "step": 4578 }, { "epoch": 36.632, "grad_norm": 33.616371154785156, "learning_rate": 3.5235555555555556e-05, "loss": 1.5885, "step": 4579 }, { "epoch": 36.64, "grad_norm": 21.47886085510254, "learning_rate": 3.523111111111112e-05, "loss": 1.2912, "step": 4580 }, { "epoch": 36.648, "grad_norm": 90.73450469970703, "learning_rate": 3.5226666666666666e-05, "loss": 1.3357, "step": 4581 }, { "epoch": 36.656, "grad_norm": 20.726760864257812, "learning_rate": 3.522222222222222e-05, "loss": 1.4453, "step": 4582 }, { "epoch": 36.664, "grad_norm": 98.51426696777344, "learning_rate": 3.5217777777777776e-05, "loss": 1.0291, "step": 4583 }, { "epoch": 36.672, "grad_norm": 100.0560302734375, "learning_rate": 3.521333333333334e-05, "loss": 1.1602, "step": 4584 }, { "epoch": 36.68, "grad_norm": 34.05472946166992, "learning_rate": 3.520888888888889e-05, "loss": 1.9149, "step": 4585 }, { "epoch": 36.688, "grad_norm": 32.740028381347656, "learning_rate": 3.520444444444445e-05, "loss": 1.0626, "step": 4586 }, { "epoch": 36.696, "grad_norm": 19.643335342407227, "learning_rate": 3.52e-05, "loss": 1.2637, "step": 
4587 }, { "epoch": 36.704, "grad_norm": 29.49457550048828, "learning_rate": 3.519555555555556e-05, "loss": 1.2361, "step": 4588 }, { "epoch": 36.712, "grad_norm": 40.26499938964844, "learning_rate": 3.519111111111111e-05, "loss": 0.9944, "step": 4589 }, { "epoch": 36.72, "grad_norm": 20.796369552612305, "learning_rate": 3.5186666666666666e-05, "loss": 1.2914, "step": 4590 }, { "epoch": 36.728, "grad_norm": 45.16490936279297, "learning_rate": 3.518222222222222e-05, "loss": 1.2286, "step": 4591 }, { "epoch": 36.736, "grad_norm": 40.7811393737793, "learning_rate": 3.517777777777778e-05, "loss": 1.3361, "step": 4592 }, { "epoch": 36.744, "grad_norm": 33.58012390136719, "learning_rate": 3.517333333333334e-05, "loss": 2.1151, "step": 4593 }, { "epoch": 36.752, "grad_norm": 39.879966735839844, "learning_rate": 3.516888888888889e-05, "loss": 0.8616, "step": 4594 }, { "epoch": 36.76, "grad_norm": 42.44010925292969, "learning_rate": 3.516444444444445e-05, "loss": 1.0508, "step": 4595 }, { "epoch": 36.768, "grad_norm": 29.806596755981445, "learning_rate": 3.516e-05, "loss": 0.8623, "step": 4596 }, { "epoch": 36.776, "grad_norm": 20.717403411865234, "learning_rate": 3.515555555555556e-05, "loss": 0.9983, "step": 4597 }, { "epoch": 36.784, "grad_norm": 29.431739807128906, "learning_rate": 3.515111111111111e-05, "loss": 1.3987, "step": 4598 }, { "epoch": 36.792, "grad_norm": 28.792226791381836, "learning_rate": 3.514666666666667e-05, "loss": 1.1887, "step": 4599 }, { "epoch": 36.8, "grad_norm": 16.85744285583496, "learning_rate": 3.514222222222223e-05, "loss": 0.8161, "step": 4600 }, { "epoch": 36.808, "grad_norm": 41.59613800048828, "learning_rate": 3.513777777777778e-05, "loss": 1.001, "step": 4601 }, { "epoch": 36.816, "grad_norm": 29.569467544555664, "learning_rate": 3.513333333333334e-05, "loss": 1.0215, "step": 4602 }, { "epoch": 36.824, "grad_norm": 25.860393524169922, "learning_rate": 3.5128888888888886e-05, "loss": 1.3938, "step": 4603 }, { "epoch": 36.832, "grad_norm": 
19.911224365234375, "learning_rate": 3.512444444444445e-05, "loss": 1.1038, "step": 4604 }, { "epoch": 36.84, "grad_norm": 34.114864349365234, "learning_rate": 3.512e-05, "loss": 1.0211, "step": 4605 }, { "epoch": 36.848, "grad_norm": 136.24447631835938, "learning_rate": 3.511555555555556e-05, "loss": 1.3462, "step": 4606 }, { "epoch": 36.856, "grad_norm": 42.13872528076172, "learning_rate": 3.511111111111111e-05, "loss": 0.9806, "step": 4607 }, { "epoch": 36.864, "grad_norm": 42.52913284301758, "learning_rate": 3.5106666666666674e-05, "loss": 1.2763, "step": 4608 }, { "epoch": 36.872, "grad_norm": 31.51858901977539, "learning_rate": 3.510222222222222e-05, "loss": 0.9844, "step": 4609 }, { "epoch": 36.88, "grad_norm": 37.60589599609375, "learning_rate": 3.509777777777778e-05, "loss": 1.1961, "step": 4610 }, { "epoch": 36.888, "grad_norm": 32.518898010253906, "learning_rate": 3.509333333333333e-05, "loss": 1.3366, "step": 4611 }, { "epoch": 36.896, "grad_norm": 44.06313705444336, "learning_rate": 3.5088888888888886e-05, "loss": 1.1334, "step": 4612 }, { "epoch": 36.904, "grad_norm": 20.43815040588379, "learning_rate": 3.508444444444445e-05, "loss": 1.218, "step": 4613 }, { "epoch": 36.912, "grad_norm": 19.372379302978516, "learning_rate": 3.508e-05, "loss": 0.8825, "step": 4614 }, { "epoch": 36.92, "grad_norm": 50.776527404785156, "learning_rate": 3.507555555555556e-05, "loss": 1.0672, "step": 4615 }, { "epoch": 36.928, "grad_norm": 35.64638137817383, "learning_rate": 3.507111111111111e-05, "loss": 1.5759, "step": 4616 }, { "epoch": 36.936, "grad_norm": 29.562042236328125, "learning_rate": 3.506666666666667e-05, "loss": 1.4029, "step": 4617 }, { "epoch": 36.944, "grad_norm": 15.417291641235352, "learning_rate": 3.506222222222222e-05, "loss": 1.0096, "step": 4618 }, { "epoch": 36.952, "grad_norm": 24.676433563232422, "learning_rate": 3.505777777777778e-05, "loss": 1.0457, "step": 4619 }, { "epoch": 36.96, "grad_norm": 42.13737487792969, "learning_rate": 
3.505333333333333e-05, "loss": 1.1904, "step": 4620 }, { "epoch": 36.968, "grad_norm": 46.91667556762695, "learning_rate": 3.5048888888888894e-05, "loss": 0.9522, "step": 4621 }, { "epoch": 36.976, "grad_norm": 38.26833724975586, "learning_rate": 3.504444444444445e-05, "loss": 1.0258, "step": 4622 }, { "epoch": 36.984, "grad_norm": 20.265029907226562, "learning_rate": 3.504e-05, "loss": 1.042, "step": 4623 }, { "epoch": 36.992, "grad_norm": 18.589109420776367, "learning_rate": 3.503555555555556e-05, "loss": 0.9699, "step": 4624 }, { "epoch": 37.0, "grad_norm": 39.00581359863281, "learning_rate": 3.503111111111111e-05, "loss": 1.7148, "step": 4625 }, { "epoch": 37.0, "eval_loss": 1.3016480207443237, "eval_map": 0.3172, "eval_map_50": 0.6464, "eval_map_75": 0.2717, "eval_map_Coverall": 0.5124, "eval_map_Face_Shield": 0.3717, "eval_map_Gloves": 0.2424, "eval_map_Goggles": 0.1487, "eval_map_Mask": 0.311, "eval_map_large": 0.4957, "eval_map_medium": 0.2053, "eval_map_small": 0.1575, "eval_mar_1": 0.2885, "eval_mar_10": 0.4927, "eval_mar_100": 0.5091, "eval_mar_100_Coverall": 0.6467, "eval_mar_100_Face_Shield": 0.6118, "eval_mar_100_Gloves": 0.3918, "eval_mar_100_Goggles": 0.4625, "eval_mar_100_Mask": 0.4327, "eval_mar_large": 0.6335, "eval_mar_medium": 0.4052, "eval_mar_small": 0.2458, "eval_runtime": 0.9312, "eval_samples_per_second": 31.141, "eval_steps_per_second": 2.148, "step": 4625 }, { "epoch": 37.008, "grad_norm": 22.23470115661621, "learning_rate": 3.502666666666667e-05, "loss": 1.1522, "step": 4626 }, { "epoch": 37.016, "grad_norm": 34.650753021240234, "learning_rate": 3.502222222222222e-05, "loss": 2.7091, "step": 4627 }, { "epoch": 37.024, "grad_norm": 17.12792205810547, "learning_rate": 3.501777777777778e-05, "loss": 0.9863, "step": 4628 }, { "epoch": 37.032, "grad_norm": 23.603052139282227, "learning_rate": 3.501333333333334e-05, "loss": 1.884, "step": 4629 }, { "epoch": 37.04, "grad_norm": 36.46373748779297, "learning_rate": 3.5008888888888894e-05, 
"loss": 0.9581, "step": 4630 }, { "epoch": 37.048, "grad_norm": 30.598003387451172, "learning_rate": 3.500444444444444e-05, "loss": 0.945, "step": 4631 }, { "epoch": 37.056, "grad_norm": 18.585172653198242, "learning_rate": 3.5e-05, "loss": 1.1959, "step": 4632 }, { "epoch": 37.064, "grad_norm": 34.55852127075195, "learning_rate": 3.499555555555556e-05, "loss": 1.0743, "step": 4633 }, { "epoch": 37.072, "grad_norm": 64.26773834228516, "learning_rate": 3.4991111111111113e-05, "loss": 1.0353, "step": 4634 }, { "epoch": 37.08, "grad_norm": 23.247482299804688, "learning_rate": 3.498666666666667e-05, "loss": 1.0807, "step": 4635 }, { "epoch": 37.088, "grad_norm": 131.01490783691406, "learning_rate": 3.498222222222222e-05, "loss": 1.3512, "step": 4636 }, { "epoch": 37.096, "grad_norm": 20.544666290283203, "learning_rate": 3.4977777777777785e-05, "loss": 1.3378, "step": 4637 }, { "epoch": 37.104, "grad_norm": 43.45826721191406, "learning_rate": 3.497333333333333e-05, "loss": 1.669, "step": 4638 }, { "epoch": 37.112, "grad_norm": 44.43025207519531, "learning_rate": 3.496888888888889e-05, "loss": 0.9251, "step": 4639 }, { "epoch": 37.12, "grad_norm": 34.45350646972656, "learning_rate": 3.496444444444444e-05, "loss": 1.036, "step": 4640 }, { "epoch": 37.128, "grad_norm": 23.530088424682617, "learning_rate": 3.4960000000000004e-05, "loss": 1.2659, "step": 4641 }, { "epoch": 37.136, "grad_norm": 12.997893333435059, "learning_rate": 3.495555555555556e-05, "loss": 0.8898, "step": 4642 }, { "epoch": 37.144, "grad_norm": 21.950502395629883, "learning_rate": 3.4951111111111114e-05, "loss": 0.891, "step": 4643 }, { "epoch": 37.152, "grad_norm": 28.688133239746094, "learning_rate": 3.494666666666667e-05, "loss": 1.0396, "step": 4644 }, { "epoch": 37.16, "grad_norm": 17.004179000854492, "learning_rate": 3.4942222222222223e-05, "loss": 1.0855, "step": 4645 }, { "epoch": 37.168, "grad_norm": 18.50916862487793, "learning_rate": 3.493777777777778e-05, "loss": 1.077, "step": 4646 }, { 
"epoch": 37.176, "grad_norm": 18.483932495117188, "learning_rate": 3.493333333333333e-05, "loss": 0.9426, "step": 4647 }, { "epoch": 37.184, "grad_norm": 25.17835235595703, "learning_rate": 3.492888888888889e-05, "loss": 1.0155, "step": 4648 }, { "epoch": 37.192, "grad_norm": 21.251081466674805, "learning_rate": 3.492444444444445e-05, "loss": 1.0138, "step": 4649 }, { "epoch": 37.2, "grad_norm": 14.494869232177734, "learning_rate": 3.4920000000000004e-05, "loss": 0.8931, "step": 4650 }, { "epoch": 37.208, "grad_norm": 23.876020431518555, "learning_rate": 3.491555555555556e-05, "loss": 1.1685, "step": 4651 }, { "epoch": 37.216, "grad_norm": 61.630802154541016, "learning_rate": 3.4911111111111114e-05, "loss": 1.4121, "step": 4652 }, { "epoch": 37.224, "grad_norm": 29.43292999267578, "learning_rate": 3.490666666666667e-05, "loss": 0.8615, "step": 4653 }, { "epoch": 37.232, "grad_norm": 71.25277709960938, "learning_rate": 3.4902222222222224e-05, "loss": 1.251, "step": 4654 }, { "epoch": 37.24, "grad_norm": 17.15047836303711, "learning_rate": 3.489777777777778e-05, "loss": 1.1348, "step": 4655 }, { "epoch": 37.248, "grad_norm": 23.192842483520508, "learning_rate": 3.4893333333333334e-05, "loss": 1.304, "step": 4656 }, { "epoch": 37.256, "grad_norm": 37.011478424072266, "learning_rate": 3.4888888888888895e-05, "loss": 1.4809, "step": 4657 }, { "epoch": 37.264, "grad_norm": 29.576053619384766, "learning_rate": 3.488444444444445e-05, "loss": 0.9967, "step": 4658 }, { "epoch": 37.272, "grad_norm": 39.077510833740234, "learning_rate": 3.4880000000000005e-05, "loss": 1.0618, "step": 4659 }, { "epoch": 37.28, "grad_norm": 18.69775390625, "learning_rate": 3.487555555555555e-05, "loss": 0.744, "step": 4660 }, { "epoch": 37.288, "grad_norm": 45.145423889160156, "learning_rate": 3.4871111111111115e-05, "loss": 1.3212, "step": 4661 }, { "epoch": 37.296, "grad_norm": 22.94744110107422, "learning_rate": 3.486666666666667e-05, "loss": 1.2247, "step": 4662 }, { "epoch": 37.304, 
"grad_norm": 21.063705444335938, "learning_rate": 3.4862222222222224e-05, "loss": 0.7378, "step": 4663 }, { "epoch": 37.312, "grad_norm": 33.24661636352539, "learning_rate": 3.485777777777778e-05, "loss": 1.4408, "step": 4664 }, { "epoch": 37.32, "grad_norm": 50.16910934448242, "learning_rate": 3.4853333333333334e-05, "loss": 1.0951, "step": 4665 }, { "epoch": 37.328, "grad_norm": 33.536922454833984, "learning_rate": 3.484888888888889e-05, "loss": 0.9234, "step": 4666 }, { "epoch": 37.336, "grad_norm": 24.506181716918945, "learning_rate": 3.4844444444444444e-05, "loss": 1.0793, "step": 4667 }, { "epoch": 37.344, "grad_norm": 63.26516342163086, "learning_rate": 3.484e-05, "loss": 3.5801, "step": 4668 }, { "epoch": 37.352, "grad_norm": 22.316118240356445, "learning_rate": 3.483555555555555e-05, "loss": 0.9521, "step": 4669 }, { "epoch": 37.36, "grad_norm": 75.24462890625, "learning_rate": 3.4831111111111115e-05, "loss": 1.3585, "step": 4670 }, { "epoch": 37.368, "grad_norm": 38.74032974243164, "learning_rate": 3.482666666666667e-05, "loss": 1.5789, "step": 4671 }, { "epoch": 37.376, "grad_norm": 25.127168655395508, "learning_rate": 3.4822222222222225e-05, "loss": 0.8983, "step": 4672 }, { "epoch": 37.384, "grad_norm": 63.6871337890625, "learning_rate": 3.481777777777778e-05, "loss": 2.0099, "step": 4673 }, { "epoch": 37.392, "grad_norm": 285.4446716308594, "learning_rate": 3.4813333333333334e-05, "loss": 1.2663, "step": 4674 }, { "epoch": 37.4, "grad_norm": 16.941389083862305, "learning_rate": 3.480888888888889e-05, "loss": 1.4196, "step": 4675 }, { "epoch": 37.408, "grad_norm": 34.29830551147461, "learning_rate": 3.4804444444444444e-05, "loss": 1.1376, "step": 4676 }, { "epoch": 37.416, "grad_norm": 20.77613067626953, "learning_rate": 3.48e-05, "loss": 1.0768, "step": 4677 }, { "epoch": 37.424, "grad_norm": 24.135194778442383, "learning_rate": 3.479555555555556e-05, "loss": 0.8301, "step": 4678 }, { "epoch": 37.432, "grad_norm": 18.85253143310547, "learning_rate": 
3.4791111111111115e-05, "loss": 1.1459, "step": 4679 }, { "epoch": 37.44, "grad_norm": 26.004318237304688, "learning_rate": 3.478666666666667e-05, "loss": 0.799, "step": 4680 }, { "epoch": 37.448, "grad_norm": 16.962255477905273, "learning_rate": 3.478222222222222e-05, "loss": 0.9828, "step": 4681 }, { "epoch": 37.456, "grad_norm": 176.5706787109375, "learning_rate": 3.477777777777778e-05, "loss": 1.0243, "step": 4682 }, { "epoch": 37.464, "grad_norm": 31.230974197387695, "learning_rate": 3.4773333333333335e-05, "loss": 1.2616, "step": 4683 }, { "epoch": 37.472, "grad_norm": 32.77106857299805, "learning_rate": 3.476888888888889e-05, "loss": 1.306, "step": 4684 }, { "epoch": 37.48, "grad_norm": 29.143993377685547, "learning_rate": 3.4764444444444444e-05, "loss": 1.1438, "step": 4685 }, { "epoch": 37.488, "grad_norm": 17.643007278442383, "learning_rate": 3.4760000000000006e-05, "loss": 0.8943, "step": 4686 }, { "epoch": 37.496, "grad_norm": 34.98828887939453, "learning_rate": 3.475555555555556e-05, "loss": 1.2361, "step": 4687 }, { "epoch": 37.504, "grad_norm": 45.508544921875, "learning_rate": 3.475111111111111e-05, "loss": 0.9131, "step": 4688 }, { "epoch": 37.512, "grad_norm": 67.9127197265625, "learning_rate": 3.4746666666666664e-05, "loss": 0.9695, "step": 4689 }, { "epoch": 37.52, "grad_norm": 119.45172882080078, "learning_rate": 3.4742222222222225e-05, "loss": 1.0542, "step": 4690 }, { "epoch": 37.528, "grad_norm": 46.2356071472168, "learning_rate": 3.473777777777778e-05, "loss": 1.0947, "step": 4691 }, { "epoch": 37.536, "grad_norm": 43.401519775390625, "learning_rate": 3.4733333333333335e-05, "loss": 1.3235, "step": 4692 }, { "epoch": 37.544, "grad_norm": 19.529953002929688, "learning_rate": 3.472888888888889e-05, "loss": 1.2258, "step": 4693 }, { "epoch": 37.552, "grad_norm": 20.670265197753906, "learning_rate": 3.472444444444445e-05, "loss": 1.1888, "step": 4694 }, { "epoch": 37.56, "grad_norm": 29.23995018005371, "learning_rate": 3.472e-05, "loss": 
1.0412, "step": 4695 }, { "epoch": 37.568, "grad_norm": 20.300874710083008, "learning_rate": 3.4715555555555554e-05, "loss": 1.23, "step": 4696 }, { "epoch": 37.576, "grad_norm": 31.640134811401367, "learning_rate": 3.471111111111111e-05, "loss": 1.2596, "step": 4697 }, { "epoch": 37.584, "grad_norm": 39.123779296875, "learning_rate": 3.470666666666667e-05, "loss": 2.3376, "step": 4698 }, { "epoch": 37.592, "grad_norm": 20.419397354125977, "learning_rate": 3.4702222222222226e-05, "loss": 0.8945, "step": 4699 }, { "epoch": 37.6, "grad_norm": 20.69231605529785, "learning_rate": 3.469777777777778e-05, "loss": 0.929, "step": 4700 }, { "epoch": 37.608, "grad_norm": 17.136028289794922, "learning_rate": 3.4693333333333335e-05, "loss": 0.9633, "step": 4701 }, { "epoch": 37.616, "grad_norm": 21.420164108276367, "learning_rate": 3.468888888888889e-05, "loss": 1.4612, "step": 4702 }, { "epoch": 37.624, "grad_norm": 14.546588897705078, "learning_rate": 3.4684444444444445e-05, "loss": 1.2186, "step": 4703 }, { "epoch": 37.632, "grad_norm": 18.7222843170166, "learning_rate": 3.468e-05, "loss": 1.0726, "step": 4704 }, { "epoch": 37.64, "grad_norm": 27.568532943725586, "learning_rate": 3.4675555555555555e-05, "loss": 1.3721, "step": 4705 }, { "epoch": 37.648, "grad_norm": 28.444732666015625, "learning_rate": 3.4671111111111116e-05, "loss": 1.083, "step": 4706 }, { "epoch": 37.656, "grad_norm": 42.52273178100586, "learning_rate": 3.466666666666667e-05, "loss": 1.4806, "step": 4707 }, { "epoch": 37.664, "grad_norm": 24.985368728637695, "learning_rate": 3.4662222222222226e-05, "loss": 1.0715, "step": 4708 }, { "epoch": 37.672, "grad_norm": 49.63264846801758, "learning_rate": 3.465777777777778e-05, "loss": 1.0426, "step": 4709 }, { "epoch": 37.68, "grad_norm": 62.904296875, "learning_rate": 3.4653333333333336e-05, "loss": 1.1886, "step": 4710 }, { "epoch": 37.688, "grad_norm": 17.017574310302734, "learning_rate": 3.464888888888889e-05, "loss": 1.0607, "step": 4711 }, { "epoch": 
37.696, "grad_norm": 25.907800674438477, "learning_rate": 3.4644444444444446e-05, "loss": 1.131, "step": 4712 }, { "epoch": 37.704, "grad_norm": 22.77420997619629, "learning_rate": 3.464e-05, "loss": 1.3353, "step": 4713 }, { "epoch": 37.712, "grad_norm": 51.25349426269531, "learning_rate": 3.463555555555556e-05, "loss": 1.2712, "step": 4714 }, { "epoch": 37.72, "grad_norm": 39.97867965698242, "learning_rate": 3.463111111111112e-05, "loss": 1.0897, "step": 4715 }, { "epoch": 37.728, "grad_norm": 36.0957145690918, "learning_rate": 3.462666666666667e-05, "loss": 1.1962, "step": 4716 }, { "epoch": 37.736, "grad_norm": 32.47708511352539, "learning_rate": 3.462222222222222e-05, "loss": 1.4001, "step": 4717 }, { "epoch": 37.744, "grad_norm": 52.79838943481445, "learning_rate": 3.4617777777777775e-05, "loss": 1.2605, "step": 4718 }, { "epoch": 37.752, "grad_norm": 92.94499206542969, "learning_rate": 3.4613333333333336e-05, "loss": 0.8564, "step": 4719 }, { "epoch": 37.76, "grad_norm": 34.527164459228516, "learning_rate": 3.460888888888889e-05, "loss": 1.2448, "step": 4720 }, { "epoch": 37.768, "grad_norm": 36.582733154296875, "learning_rate": 3.4604444444444446e-05, "loss": 0.991, "step": 4721 }, { "epoch": 37.776, "grad_norm": 17.876405715942383, "learning_rate": 3.46e-05, "loss": 0.997, "step": 4722 }, { "epoch": 37.784, "grad_norm": 51.07799530029297, "learning_rate": 3.4595555555555556e-05, "loss": 1.3825, "step": 4723 }, { "epoch": 37.792, "grad_norm": 30.707809448242188, "learning_rate": 3.459111111111111e-05, "loss": 0.7345, "step": 4724 }, { "epoch": 37.8, "grad_norm": 31.628559112548828, "learning_rate": 3.4586666666666665e-05, "loss": 1.0893, "step": 4725 }, { "epoch": 37.808, "grad_norm": 35.66128158569336, "learning_rate": 3.458222222222222e-05, "loss": 1.2289, "step": 4726 }, { "epoch": 37.816, "grad_norm": 17.953359603881836, "learning_rate": 3.457777777777778e-05, "loss": 1.4463, "step": 4727 }, { "epoch": 37.824, "grad_norm": 57.661251068115234, 
"learning_rate": 3.4573333333333337e-05, "loss": 1.152, "step": 4728 }, { "epoch": 37.832, "grad_norm": 28.57618522644043, "learning_rate": 3.456888888888889e-05, "loss": 1.1265, "step": 4729 }, { "epoch": 37.84, "grad_norm": 26.314105987548828, "learning_rate": 3.4564444444444446e-05, "loss": 0.9397, "step": 4730 }, { "epoch": 37.848, "grad_norm": 36.65727233886719, "learning_rate": 3.456e-05, "loss": 1.1643, "step": 4731 }, { "epoch": 37.856, "grad_norm": 28.10890007019043, "learning_rate": 3.4555555555555556e-05, "loss": 1.8968, "step": 4732 }, { "epoch": 37.864, "grad_norm": 26.416088104248047, "learning_rate": 3.455111111111111e-05, "loss": 0.837, "step": 4733 }, { "epoch": 37.872, "grad_norm": 26.226537704467773, "learning_rate": 3.4546666666666666e-05, "loss": 1.4431, "step": 4734 }, { "epoch": 37.88, "grad_norm": 17.736722946166992, "learning_rate": 3.454222222222223e-05, "loss": 0.9661, "step": 4735 }, { "epoch": 37.888, "grad_norm": 19.318283081054688, "learning_rate": 3.453777777777778e-05, "loss": 1.2114, "step": 4736 }, { "epoch": 37.896, "grad_norm": 21.442249298095703, "learning_rate": 3.453333333333334e-05, "loss": 1.1657, "step": 4737 }, { "epoch": 37.904, "grad_norm": 23.873090744018555, "learning_rate": 3.4528888888888885e-05, "loss": 1.2317, "step": 4738 }, { "epoch": 37.912, "grad_norm": 46.0141487121582, "learning_rate": 3.452444444444445e-05, "loss": 1.3037, "step": 4739 }, { "epoch": 37.92, "grad_norm": 30.359182357788086, "learning_rate": 3.452e-05, "loss": 1.5874, "step": 4740 }, { "epoch": 37.928, "grad_norm": 62.23090362548828, "learning_rate": 3.4515555555555556e-05, "loss": 1.2614, "step": 4741 }, { "epoch": 37.936, "grad_norm": 16.819028854370117, "learning_rate": 3.451111111111111e-05, "loss": 1.0848, "step": 4742 }, { "epoch": 37.944, "grad_norm": 14.623041152954102, "learning_rate": 3.450666666666667e-05, "loss": 1.1218, "step": 4743 }, { "epoch": 37.952, "grad_norm": 31.937170028686523, "learning_rate": 3.450222222222223e-05, 
"loss": 1.2682, "step": 4744 }, { "epoch": 37.96, "grad_norm": 25.38582420349121, "learning_rate": 3.4497777777777776e-05, "loss": 1.1606, "step": 4745 }, { "epoch": 37.968, "grad_norm": 21.223913192749023, "learning_rate": 3.449333333333333e-05, "loss": 1.2136, "step": 4746 }, { "epoch": 37.976, "grad_norm": 20.370960235595703, "learning_rate": 3.448888888888889e-05, "loss": 1.0951, "step": 4747 }, { "epoch": 37.984, "grad_norm": 23.778993606567383, "learning_rate": 3.448444444444445e-05, "loss": 1.3207, "step": 4748 }, { "epoch": 37.992, "grad_norm": 15.959517478942871, "learning_rate": 3.448e-05, "loss": 0.9558, "step": 4749 }, { "epoch": 38.0, "grad_norm": 21.917322158813477, "learning_rate": 3.447555555555556e-05, "loss": 1.12, "step": 4750 }, { "epoch": 38.0, "eval_loss": 1.227369785308838, "eval_map": 0.2858, "eval_map_50": 0.5875, "eval_map_75": 0.2486, "eval_map_Coverall": 0.5196, "eval_map_Face_Shield": 0.2782, "eval_map_Gloves": 0.2283, "eval_map_Goggles": 0.1074, "eval_map_Mask": 0.2954, "eval_map_large": 0.4789, "eval_map_medium": 0.1838, "eval_map_small": 0.1789, "eval_mar_1": 0.2611, "eval_mar_10": 0.4674, "eval_mar_100": 0.4867, "eval_mar_100_Coverall": 0.6444, "eval_mar_100_Face_Shield": 0.6412, "eval_mar_100_Gloves": 0.3492, "eval_mar_100_Goggles": 0.3969, "eval_mar_100_Mask": 0.4019, "eval_mar_large": 0.6267, "eval_mar_medium": 0.4235, "eval_mar_small": 0.2285, "eval_runtime": 0.9509, "eval_samples_per_second": 30.498, "eval_steps_per_second": 2.103, "step": 4750 }, { "epoch": 38.008, "grad_norm": 28.093191146850586, "learning_rate": 3.447111111111112e-05, "loss": 1.0486, "step": 4751 }, { "epoch": 38.016, "grad_norm": 42.97468948364258, "learning_rate": 3.4466666666666666e-05, "loss": 1.1099, "step": 4752 }, { "epoch": 38.024, "grad_norm": 32.13359069824219, "learning_rate": 3.446222222222222e-05, "loss": 1.2545, "step": 4753 }, { "epoch": 38.032, "grad_norm": 47.872989654541016, "learning_rate": 3.4457777777777776e-05, "loss": 1.1007, "step": 
4754 }, { "epoch": 38.04, "grad_norm": 27.849777221679688, "learning_rate": 3.445333333333334e-05, "loss": 1.2083, "step": 4755 }, { "epoch": 38.048, "grad_norm": 26.44498062133789, "learning_rate": 3.444888888888889e-05, "loss": 0.8907, "step": 4756 }, { "epoch": 38.056, "grad_norm": 28.81896209716797, "learning_rate": 3.444444444444445e-05, "loss": 1.2478, "step": 4757 }, { "epoch": 38.064, "grad_norm": 62.62900161743164, "learning_rate": 3.444e-05, "loss": 0.9796, "step": 4758 }, { "epoch": 38.072, "grad_norm": 23.285036087036133, "learning_rate": 3.443555555555556e-05, "loss": 1.0562, "step": 4759 }, { "epoch": 38.08, "grad_norm": 26.432287216186523, "learning_rate": 3.443111111111111e-05, "loss": 1.1598, "step": 4760 }, { "epoch": 38.088, "grad_norm": 29.788734436035156, "learning_rate": 3.442666666666667e-05, "loss": 0.9486, "step": 4761 }, { "epoch": 38.096, "grad_norm": 23.018949508666992, "learning_rate": 3.442222222222222e-05, "loss": 1.2126, "step": 4762 }, { "epoch": 38.104, "grad_norm": 17.114439010620117, "learning_rate": 3.441777777777778e-05, "loss": 0.6392, "step": 4763 }, { "epoch": 38.112, "grad_norm": 22.254878997802734, "learning_rate": 3.441333333333334e-05, "loss": 1.1497, "step": 4764 }, { "epoch": 38.12, "grad_norm": 30.971315383911133, "learning_rate": 3.440888888888889e-05, "loss": 0.8603, "step": 4765 }, { "epoch": 38.128, "grad_norm": 16.487659454345703, "learning_rate": 3.440444444444445e-05, "loss": 0.8471, "step": 4766 }, { "epoch": 38.136, "grad_norm": 47.10128402709961, "learning_rate": 3.4399999999999996e-05, "loss": 0.9194, "step": 4767 }, { "epoch": 38.144, "grad_norm": 37.405059814453125, "learning_rate": 3.439555555555556e-05, "loss": 1.0651, "step": 4768 }, { "epoch": 38.152, "grad_norm": 24.54397201538086, "learning_rate": 3.439111111111111e-05, "loss": 1.4916, "step": 4769 }, { "epoch": 38.16, "grad_norm": 37.600425720214844, "learning_rate": 3.438666666666667e-05, "loss": 1.1767, "step": 4770 }, { "epoch": 38.168, 
"grad_norm": 58.58785629272461, "learning_rate": 3.438222222222222e-05, "loss": 0.7281, "step": 4771 }, { "epoch": 38.176, "grad_norm": 19.76998519897461, "learning_rate": 3.4377777777777784e-05, "loss": 1.416, "step": 4772 }, { "epoch": 38.184, "grad_norm": 21.269222259521484, "learning_rate": 3.437333333333334e-05, "loss": 1.2857, "step": 4773 }, { "epoch": 38.192, "grad_norm": 36.831363677978516, "learning_rate": 3.4368888888888887e-05, "loss": 0.9609, "step": 4774 }, { "epoch": 38.2, "grad_norm": 26.31774139404297, "learning_rate": 3.436444444444444e-05, "loss": 1.1555, "step": 4775 }, { "epoch": 38.208, "grad_norm": 21.473323822021484, "learning_rate": 3.436e-05, "loss": 2.1902, "step": 4776 }, { "epoch": 38.216, "grad_norm": 18.866498947143555, "learning_rate": 3.435555555555556e-05, "loss": 0.8014, "step": 4777 }, { "epoch": 38.224, "grad_norm": 18.998748779296875, "learning_rate": 3.435111111111111e-05, "loss": 0.8366, "step": 4778 }, { "epoch": 38.232, "grad_norm": 21.998615264892578, "learning_rate": 3.434666666666667e-05, "loss": 1.134, "step": 4779 }, { "epoch": 38.24, "grad_norm": 24.153282165527344, "learning_rate": 3.434222222222222e-05, "loss": 0.888, "step": 4780 }, { "epoch": 38.248, "grad_norm": 21.299779891967773, "learning_rate": 3.433777777777778e-05, "loss": 0.9207, "step": 4781 }, { "epoch": 38.256, "grad_norm": 60.210052490234375, "learning_rate": 3.433333333333333e-05, "loss": 1.0663, "step": 4782 }, { "epoch": 38.264, "grad_norm": 52.99997329711914, "learning_rate": 3.432888888888889e-05, "loss": 1.9963, "step": 4783 }, { "epoch": 38.272, "grad_norm": 43.1383171081543, "learning_rate": 3.432444444444445e-05, "loss": 1.1309, "step": 4784 }, { "epoch": 38.28, "grad_norm": 25.960939407348633, "learning_rate": 3.4320000000000003e-05, "loss": 1.2782, "step": 4785 }, { "epoch": 38.288, "grad_norm": 17.73378562927246, "learning_rate": 3.431555555555556e-05, "loss": 0.9916, "step": 4786 }, { "epoch": 38.296, "grad_norm": 36.847164154052734, 
"learning_rate": 3.431111111111111e-05, "loss": 0.8809, "step": 4787 }, { "epoch": 38.304, "grad_norm": 29.036968231201172, "learning_rate": 3.430666666666667e-05, "loss": 1.2039, "step": 4788 }, { "epoch": 38.312, "grad_norm": 23.410573959350586, "learning_rate": 3.430222222222222e-05, "loss": 1.4226, "step": 4789 }, { "epoch": 38.32, "grad_norm": 21.197818756103516, "learning_rate": 3.429777777777778e-05, "loss": 1.0322, "step": 4790 }, { "epoch": 38.328, "grad_norm": 21.009504318237305, "learning_rate": 3.429333333333333e-05, "loss": 1.1428, "step": 4791 }, { "epoch": 38.336, "grad_norm": 41.784095764160156, "learning_rate": 3.4288888888888894e-05, "loss": 1.5482, "step": 4792 }, { "epoch": 38.344, "grad_norm": 27.649843215942383, "learning_rate": 3.428444444444445e-05, "loss": 0.8863, "step": 4793 }, { "epoch": 38.352, "grad_norm": 32.25556945800781, "learning_rate": 3.4280000000000004e-05, "loss": 1.2739, "step": 4794 }, { "epoch": 38.36, "grad_norm": 33.75053405761719, "learning_rate": 3.427555555555555e-05, "loss": 1.3221, "step": 4795 }, { "epoch": 38.368, "grad_norm": 22.44923973083496, "learning_rate": 3.4271111111111114e-05, "loss": 0.969, "step": 4796 }, { "epoch": 38.376, "grad_norm": 19.99530029296875, "learning_rate": 3.426666666666667e-05, "loss": 0.8267, "step": 4797 }, { "epoch": 38.384, "grad_norm": 46.8647575378418, "learning_rate": 3.426222222222222e-05, "loss": 0.8766, "step": 4798 }, { "epoch": 38.392, "grad_norm": 21.479955673217773, "learning_rate": 3.425777777777778e-05, "loss": 0.9776, "step": 4799 }, { "epoch": 38.4, "grad_norm": 18.809038162231445, "learning_rate": 3.425333333333334e-05, "loss": 1.2831, "step": 4800 }, { "epoch": 38.408, "grad_norm": 36.96780014038086, "learning_rate": 3.4248888888888895e-05, "loss": 0.8335, "step": 4801 }, { "epoch": 38.416, "grad_norm": 26.8531494140625, "learning_rate": 3.424444444444444e-05, "loss": 2.048, "step": 4802 }, { "epoch": 38.424, "grad_norm": 15.124743461608887, "learning_rate": 
3.424e-05, "loss": 1.2229, "step": 4803 }, { "epoch": 38.432, "grad_norm": 18.943424224853516, "learning_rate": 3.423555555555556e-05, "loss": 1.1059, "step": 4804 }, { "epoch": 38.44, "grad_norm": 24.036972045898438, "learning_rate": 3.4231111111111114e-05, "loss": 0.9644, "step": 4805 }, { "epoch": 38.448, "grad_norm": 37.469078063964844, "learning_rate": 3.422666666666667e-05, "loss": 1.1538, "step": 4806 }, { "epoch": 38.456, "grad_norm": 22.938209533691406, "learning_rate": 3.4222222222222224e-05, "loss": 1.0037, "step": 4807 }, { "epoch": 38.464, "grad_norm": 24.663551330566406, "learning_rate": 3.4217777777777785e-05, "loss": 0.9336, "step": 4808 }, { "epoch": 38.472, "grad_norm": 26.546417236328125, "learning_rate": 3.421333333333333e-05, "loss": 1.2807, "step": 4809 }, { "epoch": 38.48, "grad_norm": 19.884191513061523, "learning_rate": 3.420888888888889e-05, "loss": 1.2482, "step": 4810 }, { "epoch": 38.488, "grad_norm": 24.101900100708008, "learning_rate": 3.420444444444444e-05, "loss": 1.0347, "step": 4811 }, { "epoch": 38.496, "grad_norm": 31.94198226928711, "learning_rate": 3.4200000000000005e-05, "loss": 0.8026, "step": 4812 }, { "epoch": 38.504, "grad_norm": 15.500545501708984, "learning_rate": 3.419555555555556e-05, "loss": 1.1808, "step": 4813 }, { "epoch": 38.512, "grad_norm": 18.87955093383789, "learning_rate": 3.4191111111111114e-05, "loss": 0.8448, "step": 4814 }, { "epoch": 38.52, "grad_norm": 27.355670928955078, "learning_rate": 3.418666666666667e-05, "loss": 1.5895, "step": 4815 }, { "epoch": 38.528, "grad_norm": 48.479835510253906, "learning_rate": 3.4182222222222224e-05, "loss": 1.4431, "step": 4816 }, { "epoch": 38.536, "grad_norm": 27.610376358032227, "learning_rate": 3.417777777777778e-05, "loss": 1.2913, "step": 4817 }, { "epoch": 38.544, "grad_norm": 55.12522888183594, "learning_rate": 3.4173333333333334e-05, "loss": 1.1491, "step": 4818 }, { "epoch": 38.552, "grad_norm": 29.78897476196289, "learning_rate": 3.416888888888889e-05, 
"loss": 0.8332, "step": 4819 }, { "epoch": 38.56, "grad_norm": 30.456071853637695, "learning_rate": 3.416444444444444e-05, "loss": 1.0223, "step": 4820 }, { "epoch": 38.568, "grad_norm": 25.56040382385254, "learning_rate": 3.4160000000000005e-05, "loss": 0.837, "step": 4821 }, { "epoch": 38.576, "grad_norm": 32.15056228637695, "learning_rate": 3.415555555555556e-05, "loss": 0.8508, "step": 4822 }, { "epoch": 38.584, "grad_norm": 19.141756057739258, "learning_rate": 3.4151111111111115e-05, "loss": 1.1341, "step": 4823 }, { "epoch": 38.592, "grad_norm": 55.51803207397461, "learning_rate": 3.414666666666666e-05, "loss": 1.2967, "step": 4824 }, { "epoch": 38.6, "grad_norm": 16.222806930541992, "learning_rate": 3.4142222222222224e-05, "loss": 1.2514, "step": 4825 }, { "epoch": 38.608, "grad_norm": 54.87656784057617, "learning_rate": 3.413777777777778e-05, "loss": 0.9798, "step": 4826 }, { "epoch": 38.616, "grad_norm": 31.607826232910156, "learning_rate": 3.4133333333333334e-05, "loss": 0.9052, "step": 4827 }, { "epoch": 38.624, "grad_norm": 26.03903579711914, "learning_rate": 3.412888888888889e-05, "loss": 1.0775, "step": 4828 }, { "epoch": 38.632, "grad_norm": 11.636911392211914, "learning_rate": 3.412444444444445e-05, "loss": 1.6974, "step": 4829 }, { "epoch": 38.64, "grad_norm": 31.673219680786133, "learning_rate": 3.412e-05, "loss": 1.2104, "step": 4830 }, { "epoch": 38.648, "grad_norm": 34.60627365112305, "learning_rate": 3.4115555555555553e-05, "loss": 1.1721, "step": 4831 }, { "epoch": 38.656, "grad_norm": 21.142370223999023, "learning_rate": 3.411111111111111e-05, "loss": 0.9506, "step": 4832 }, { "epoch": 38.664, "grad_norm": 22.29458999633789, "learning_rate": 3.410666666666667e-05, "loss": 1.3001, "step": 4833 }, { "epoch": 38.672, "grad_norm": 24.474687576293945, "learning_rate": 3.4102222222222225e-05, "loss": 1.2151, "step": 4834 }, { "epoch": 38.68, "grad_norm": 19.487001419067383, "learning_rate": 3.409777777777778e-05, "loss": 1.4275, "step": 4835 }, { 
"epoch": 38.688, "grad_norm": 25.32391357421875, "learning_rate": 3.4093333333333334e-05, "loss": 1.0919, "step": 4836 }, { "epoch": 38.696, "grad_norm": 37.06583023071289, "learning_rate": 3.408888888888889e-05, "loss": 0.9591, "step": 4837 }, { "epoch": 38.704, "grad_norm": 28.602859497070312, "learning_rate": 3.4084444444444444e-05, "loss": 2.3341, "step": 4838 }, { "epoch": 38.712, "grad_norm": 36.24228286743164, "learning_rate": 3.408e-05, "loss": 1.0617, "step": 4839 }, { "epoch": 38.72, "grad_norm": 16.195680618286133, "learning_rate": 3.4075555555555554e-05, "loss": 1.3336, "step": 4840 }, { "epoch": 38.728, "grad_norm": 93.76915740966797, "learning_rate": 3.4071111111111115e-05, "loss": 3.0304, "step": 4841 }, { "epoch": 38.736, "grad_norm": 77.51592254638672, "learning_rate": 3.406666666666667e-05, "loss": 1.7078, "step": 4842 }, { "epoch": 38.744, "grad_norm": 19.652711868286133, "learning_rate": 3.4062222222222225e-05, "loss": 1.0104, "step": 4843 }, { "epoch": 38.752, "grad_norm": 22.94831085205078, "learning_rate": 3.405777777777778e-05, "loss": 1.1303, "step": 4844 }, { "epoch": 38.76, "grad_norm": 46.1721305847168, "learning_rate": 3.4053333333333335e-05, "loss": 0.9176, "step": 4845 }, { "epoch": 38.768, "grad_norm": 15.458398818969727, "learning_rate": 3.404888888888889e-05, "loss": 1.2101, "step": 4846 }, { "epoch": 38.776, "grad_norm": 28.55185890197754, "learning_rate": 3.4044444444444445e-05, "loss": 1.1381, "step": 4847 }, { "epoch": 38.784, "grad_norm": 19.034692764282227, "learning_rate": 3.404e-05, "loss": 1.2932, "step": 4848 }, { "epoch": 38.792, "grad_norm": 36.30173110961914, "learning_rate": 3.403555555555556e-05, "loss": 1.0674, "step": 4849 }, { "epoch": 38.8, "grad_norm": 23.251968383789062, "learning_rate": 3.4031111111111116e-05, "loss": 0.9207, "step": 4850 }, { "epoch": 38.808, "grad_norm": 38.38898849487305, "learning_rate": 3.402666666666667e-05, "loss": 1.3716, "step": 4851 }, { "epoch": 38.816, "grad_norm": 
28.309917449951172, "learning_rate": 3.402222222222222e-05, "loss": 1.0821, "step": 4852 }, { "epoch": 38.824, "grad_norm": 25.77532196044922, "learning_rate": 3.401777777777778e-05, "loss": 0.8163, "step": 4853 }, { "epoch": 38.832, "grad_norm": 17.275419235229492, "learning_rate": 3.4013333333333335e-05, "loss": 1.1744, "step": 4854 }, { "epoch": 38.84, "grad_norm": 27.02935028076172, "learning_rate": 3.400888888888889e-05, "loss": 0.9975, "step": 4855 }, { "epoch": 38.848, "grad_norm": 28.97055435180664, "learning_rate": 3.4004444444444445e-05, "loss": 0.9026, "step": 4856 }, { "epoch": 38.856, "grad_norm": 40.05592727661133, "learning_rate": 3.4000000000000007e-05, "loss": 1.468, "step": 4857 }, { "epoch": 38.864, "grad_norm": 31.641620635986328, "learning_rate": 3.399555555555556e-05, "loss": 1.0851, "step": 4858 }, { "epoch": 38.872, "grad_norm": 25.03368377685547, "learning_rate": 3.399111111111111e-05, "loss": 0.9904, "step": 4859 }, { "epoch": 38.88, "grad_norm": 21.941184997558594, "learning_rate": 3.3986666666666664e-05, "loss": 1.1385, "step": 4860 }, { "epoch": 38.888, "grad_norm": 14.300610542297363, "learning_rate": 3.3982222222222226e-05, "loss": 1.3021, "step": 4861 }, { "epoch": 38.896, "grad_norm": 18.39886474609375, "learning_rate": 3.397777777777778e-05, "loss": 1.6287, "step": 4862 }, { "epoch": 38.904, "grad_norm": 25.971643447875977, "learning_rate": 3.3973333333333336e-05, "loss": 1.4437, "step": 4863 }, { "epoch": 38.912, "grad_norm": 23.278079986572266, "learning_rate": 3.396888888888889e-05, "loss": 1.0487, "step": 4864 }, { "epoch": 38.92, "grad_norm": 18.78101348876953, "learning_rate": 3.396444444444445e-05, "loss": 0.9905, "step": 4865 }, { "epoch": 38.928, "grad_norm": 23.993154525756836, "learning_rate": 3.396e-05, "loss": 1.407, "step": 4866 }, { "epoch": 38.936, "grad_norm": 28.361276626586914, "learning_rate": 3.3955555555555555e-05, "loss": 1.0927, "step": 4867 }, { "epoch": 38.944, "grad_norm": 29.741943359375, 
"learning_rate": 3.395111111111111e-05, "loss": 1.0114, "step": 4868 }, { "epoch": 38.952, "grad_norm": 26.597003936767578, "learning_rate": 3.394666666666667e-05, "loss": 1.896, "step": 4869 }, { "epoch": 38.96, "grad_norm": 85.46173858642578, "learning_rate": 3.3942222222222226e-05, "loss": 0.7476, "step": 4870 }, { "epoch": 38.968, "grad_norm": 26.24878692626953, "learning_rate": 3.393777777777778e-05, "loss": 1.2587, "step": 4871 }, { "epoch": 38.976, "grad_norm": 45.263145446777344, "learning_rate": 3.3933333333333336e-05, "loss": 1.0416, "step": 4872 }, { "epoch": 38.984, "grad_norm": 32.55408477783203, "learning_rate": 3.392888888888889e-05, "loss": 1.6689, "step": 4873 }, { "epoch": 38.992, "grad_norm": 12.629850387573242, "learning_rate": 3.3924444444444446e-05, "loss": 1.0853, "step": 4874 }, { "epoch": 39.0, "grad_norm": 25.378320693969727, "learning_rate": 3.392e-05, "loss": 1.099, "step": 4875 }, { "epoch": 39.0, "eval_loss": 1.1811236143112183, "eval_map": 0.318, "eval_map_50": 0.6399, "eval_map_75": 0.3063, "eval_map_Coverall": 0.5456, "eval_map_Face_Shield": 0.376, "eval_map_Gloves": 0.2329, "eval_map_Goggles": 0.1294, "eval_map_Mask": 0.306, "eval_map_large": 0.5483, "eval_map_medium": 0.1922, "eval_map_small": 0.1334, "eval_mar_1": 0.2751, "eval_mar_10": 0.4784, "eval_mar_100": 0.496, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.5353, "eval_mar_100_Gloves": 0.3885, "eval_mar_100_Goggles": 0.4281, "eval_mar_100_Mask": 0.4038, "eval_mar_large": 0.6718, "eval_mar_medium": 0.3695, "eval_mar_small": 0.1784, "eval_runtime": 0.9334, "eval_samples_per_second": 31.07, "eval_steps_per_second": 2.143, "step": 4875 }, { "epoch": 39.008, "grad_norm": 25.832473754882812, "learning_rate": 3.3915555555555555e-05, "loss": 2.1298, "step": 4876 }, { "epoch": 39.016, "grad_norm": 28.215412139892578, "learning_rate": 3.391111111111111e-05, "loss": 2.0906, "step": 4877 }, { "epoch": 39.024, "grad_norm": 38.43354415893555, "learning_rate": 
3.390666666666667e-05, "loss": 1.0245, "step": 4878 }, { "epoch": 39.032, "grad_norm": 22.790494918823242, "learning_rate": 3.390222222222223e-05, "loss": 1.1206, "step": 4879 }, { "epoch": 39.04, "grad_norm": 18.23823356628418, "learning_rate": 3.389777777777778e-05, "loss": 1.0548, "step": 4880 }, { "epoch": 39.048, "grad_norm": 22.314834594726562, "learning_rate": 3.389333333333333e-05, "loss": 1.0341, "step": 4881 }, { "epoch": 39.056, "grad_norm": 29.11233901977539, "learning_rate": 3.388888888888889e-05, "loss": 1.2171, "step": 4882 }, { "epoch": 39.064, "grad_norm": 39.886592864990234, "learning_rate": 3.3884444444444446e-05, "loss": 1.0663, "step": 4883 }, { "epoch": 39.072, "grad_norm": 24.622602462768555, "learning_rate": 3.388e-05, "loss": 1.0077, "step": 4884 }, { "epoch": 39.08, "grad_norm": 22.57551383972168, "learning_rate": 3.3875555555555556e-05, "loss": 1.3509, "step": 4885 }, { "epoch": 39.088, "grad_norm": 19.056814193725586, "learning_rate": 3.387111111111112e-05, "loss": 0.9435, "step": 4886 }, { "epoch": 39.096, "grad_norm": 28.017189025878906, "learning_rate": 3.3866666666666665e-05, "loss": 1.1201, "step": 4887 }, { "epoch": 39.104, "grad_norm": 28.99880599975586, "learning_rate": 3.386222222222222e-05, "loss": 1.2862, "step": 4888 }, { "epoch": 39.112, "grad_norm": 40.98435974121094, "learning_rate": 3.3857777777777775e-05, "loss": 1.9959, "step": 4889 }, { "epoch": 39.12, "grad_norm": 26.49435806274414, "learning_rate": 3.385333333333334e-05, "loss": 0.9858, "step": 4890 }, { "epoch": 39.128, "grad_norm": 57.013694763183594, "learning_rate": 3.384888888888889e-05, "loss": 0.913, "step": 4891 }, { "epoch": 39.136, "grad_norm": 14.395467758178711, "learning_rate": 3.3844444444444446e-05, "loss": 1.1081, "step": 4892 }, { "epoch": 39.144, "grad_norm": 26.007631301879883, "learning_rate": 3.384e-05, "loss": 1.2892, "step": 4893 }, { "epoch": 39.152, "grad_norm": 40.44033432006836, "learning_rate": 3.3835555555555556e-05, "loss": 1.8745, 
"step": 4894 }, { "epoch": 39.16, "grad_norm": 30.671661376953125, "learning_rate": 3.383111111111111e-05, "loss": 1.3689, "step": 4895 }, { "epoch": 39.168, "grad_norm": 18.254926681518555, "learning_rate": 3.3826666666666666e-05, "loss": 1.0783, "step": 4896 }, { "epoch": 39.176, "grad_norm": 27.35541343688965, "learning_rate": 3.382222222222222e-05, "loss": 1.0169, "step": 4897 }, { "epoch": 39.184, "grad_norm": 32.19794464111328, "learning_rate": 3.381777777777778e-05, "loss": 1.314, "step": 4898 }, { "epoch": 39.192, "grad_norm": 16.48601531982422, "learning_rate": 3.381333333333334e-05, "loss": 1.3456, "step": 4899 }, { "epoch": 39.2, "grad_norm": 26.183109283447266, "learning_rate": 3.380888888888889e-05, "loss": 0.9318, "step": 4900 }, { "epoch": 39.208, "grad_norm": 31.632585525512695, "learning_rate": 3.380444444444445e-05, "loss": 1.0734, "step": 4901 }, { "epoch": 39.216, "grad_norm": 19.452369689941406, "learning_rate": 3.38e-05, "loss": 0.7736, "step": 4902 }, { "epoch": 39.224, "grad_norm": 27.349714279174805, "learning_rate": 3.3795555555555556e-05, "loss": 1.3101, "step": 4903 }, { "epoch": 39.232, "grad_norm": 40.76564407348633, "learning_rate": 3.379111111111111e-05, "loss": 1.2434, "step": 4904 }, { "epoch": 39.24, "grad_norm": 58.7111930847168, "learning_rate": 3.3786666666666666e-05, "loss": 1.0172, "step": 4905 }, { "epoch": 39.248, "grad_norm": 77.7610855102539, "learning_rate": 3.378222222222223e-05, "loss": 0.6407, "step": 4906 }, { "epoch": 39.256, "grad_norm": 18.969614028930664, "learning_rate": 3.377777777777778e-05, "loss": 1.2894, "step": 4907 }, { "epoch": 39.264, "grad_norm": 28.302213668823242, "learning_rate": 3.377333333333334e-05, "loss": 0.8509, "step": 4908 }, { "epoch": 39.272, "grad_norm": 17.59432029724121, "learning_rate": 3.3768888888888886e-05, "loss": 1.1339, "step": 4909 }, { "epoch": 39.28, "grad_norm": 21.07935905456543, "learning_rate": 3.376444444444445e-05, "loss": 1.1151, "step": 4910 }, { "epoch": 39.288, 
"grad_norm": 32.017425537109375, "learning_rate": 3.376e-05, "loss": 0.9464, "step": 4911 }, { "epoch": 39.296, "grad_norm": 46.37567901611328, "learning_rate": 3.375555555555556e-05, "loss": 1.3134, "step": 4912 }, { "epoch": 39.304, "grad_norm": 19.124637603759766, "learning_rate": 3.375111111111111e-05, "loss": 1.0494, "step": 4913 }, { "epoch": 39.312, "grad_norm": 16.675893783569336, "learning_rate": 3.374666666666667e-05, "loss": 0.8751, "step": 4914 }, { "epoch": 39.32, "grad_norm": 24.73723602294922, "learning_rate": 3.374222222222223e-05, "loss": 0.9318, "step": 4915 }, { "epoch": 39.328, "grad_norm": 41.063987731933594, "learning_rate": 3.3737777777777776e-05, "loss": 1.2611, "step": 4916 }, { "epoch": 39.336, "grad_norm": 28.19845199584961, "learning_rate": 3.373333333333333e-05, "loss": 1.4353, "step": 4917 }, { "epoch": 39.344, "grad_norm": 29.03074836730957, "learning_rate": 3.372888888888889e-05, "loss": 0.9677, "step": 4918 }, { "epoch": 39.352, "grad_norm": 60.87798309326172, "learning_rate": 3.372444444444445e-05, "loss": 2.7505, "step": 4919 }, { "epoch": 39.36, "grad_norm": 13.764101028442383, "learning_rate": 3.372e-05, "loss": 1.1089, "step": 4920 }, { "epoch": 39.368, "grad_norm": 39.38951873779297, "learning_rate": 3.371555555555556e-05, "loss": 1.2676, "step": 4921 }, { "epoch": 39.376, "grad_norm": 31.5687255859375, "learning_rate": 3.371111111111111e-05, "loss": 1.0093, "step": 4922 }, { "epoch": 39.384, "grad_norm": 16.571224212646484, "learning_rate": 3.370666666666667e-05, "loss": 1.0067, "step": 4923 }, { "epoch": 39.392, "grad_norm": 29.912811279296875, "learning_rate": 3.370222222222222e-05, "loss": 1.2294, "step": 4924 }, { "epoch": 39.4, "grad_norm": 18.64929962158203, "learning_rate": 3.369777777777778e-05, "loss": 0.9815, "step": 4925 }, { "epoch": 39.408, "grad_norm": 25.269575119018555, "learning_rate": 3.369333333333333e-05, "loss": 1.1946, "step": 4926 }, { "epoch": 39.416, "grad_norm": 35.436649322509766, "learning_rate": 
3.368888888888889e-05, "loss": 1.4731, "step": 4927 }, { "epoch": 39.424, "grad_norm": 26.398832321166992, "learning_rate": 3.368444444444445e-05, "loss": 1.1993, "step": 4928 }, { "epoch": 39.432, "grad_norm": 18.54703140258789, "learning_rate": 3.368e-05, "loss": 1.0429, "step": 4929 }, { "epoch": 39.44, "grad_norm": 52.00208282470703, "learning_rate": 3.367555555555556e-05, "loss": 1.2291, "step": 4930 }, { "epoch": 39.448, "grad_norm": 37.15858840942383, "learning_rate": 3.367111111111111e-05, "loss": 2.4274, "step": 4931 }, { "epoch": 39.456, "grad_norm": 13.337949752807617, "learning_rate": 3.366666666666667e-05, "loss": 1.3022, "step": 4932 }, { "epoch": 39.464, "grad_norm": 30.62910270690918, "learning_rate": 3.366222222222222e-05, "loss": 1.197, "step": 4933 }, { "epoch": 39.472, "grad_norm": 19.940149307250977, "learning_rate": 3.365777777777778e-05, "loss": 1.0561, "step": 4934 }, { "epoch": 39.48, "grad_norm": 16.67277717590332, "learning_rate": 3.365333333333334e-05, "loss": 0.7632, "step": 4935 }, { "epoch": 39.488, "grad_norm": 22.972732543945312, "learning_rate": 3.3648888888888893e-05, "loss": 0.9421, "step": 4936 }, { "epoch": 39.496, "grad_norm": 15.324837684631348, "learning_rate": 3.364444444444445e-05, "loss": 0.8631, "step": 4937 }, { "epoch": 39.504, "grad_norm": 13.49526596069336, "learning_rate": 3.3639999999999996e-05, "loss": 1.0798, "step": 4938 }, { "epoch": 39.512, "grad_norm": 39.553382873535156, "learning_rate": 3.363555555555556e-05, "loss": 0.8937, "step": 4939 }, { "epoch": 39.52, "grad_norm": 28.452533721923828, "learning_rate": 3.363111111111111e-05, "loss": 0.6583, "step": 4940 }, { "epoch": 39.528, "grad_norm": 140.9770050048828, "learning_rate": 3.362666666666667e-05, "loss": 1.5353, "step": 4941 }, { "epoch": 39.536, "grad_norm": 24.38686752319336, "learning_rate": 3.362222222222222e-05, "loss": 1.3265, "step": 4942 }, { "epoch": 39.544, "grad_norm": 35.23424530029297, "learning_rate": 3.3617777777777784e-05, "loss": 0.741, 
"step": 4943 }, { "epoch": 39.552, "grad_norm": 24.392473220825195, "learning_rate": 3.361333333333333e-05, "loss": 1.1873, "step": 4944 }, { "epoch": 39.56, "grad_norm": 59.92893600463867, "learning_rate": 3.360888888888889e-05, "loss": 0.9124, "step": 4945 }, { "epoch": 39.568, "grad_norm": 76.856689453125, "learning_rate": 3.360444444444444e-05, "loss": 1.4104, "step": 4946 }, { "epoch": 39.576, "grad_norm": 57.76030349731445, "learning_rate": 3.3600000000000004e-05, "loss": 1.0513, "step": 4947 }, { "epoch": 39.584, "grad_norm": 28.782136917114258, "learning_rate": 3.359555555555556e-05, "loss": 0.8354, "step": 4948 }, { "epoch": 39.592, "grad_norm": 26.812284469604492, "learning_rate": 3.359111111111111e-05, "loss": 1.094, "step": 4949 }, { "epoch": 39.6, "grad_norm": 188.4306640625, "learning_rate": 3.358666666666667e-05, "loss": 0.9812, "step": 4950 }, { "epoch": 39.608, "grad_norm": 54.57023239135742, "learning_rate": 3.358222222222222e-05, "loss": 0.8308, "step": 4951 }, { "epoch": 39.616, "grad_norm": 84.09786987304688, "learning_rate": 3.357777777777778e-05, "loss": 1.4039, "step": 4952 }, { "epoch": 39.624, "grad_norm": 31.385421752929688, "learning_rate": 3.357333333333333e-05, "loss": 1.1123, "step": 4953 }, { "epoch": 39.632, "grad_norm": 23.814617156982422, "learning_rate": 3.356888888888889e-05, "loss": 1.0942, "step": 4954 }, { "epoch": 39.64, "grad_norm": 33.712303161621094, "learning_rate": 3.356444444444445e-05, "loss": 1.0614, "step": 4955 }, { "epoch": 39.648, "grad_norm": 23.60978126525879, "learning_rate": 3.3560000000000004e-05, "loss": 1.0243, "step": 4956 }, { "epoch": 39.656, "grad_norm": 34.98139572143555, "learning_rate": 3.355555555555556e-05, "loss": 1.218, "step": 4957 }, { "epoch": 39.664, "grad_norm": 13.512483596801758, "learning_rate": 3.3551111111111114e-05, "loss": 0.6109, "step": 4958 }, { "epoch": 39.672, "grad_norm": 32.060546875, "learning_rate": 3.354666666666667e-05, "loss": 1.065, "step": 4959 }, { "epoch": 39.68, 
"grad_norm": 30.15795135498047, "learning_rate": 3.354222222222222e-05, "loss": 1.1348, "step": 4960 }, { "epoch": 39.688, "grad_norm": 14.965925216674805, "learning_rate": 3.353777777777778e-05, "loss": 1.4157, "step": 4961 }, { "epoch": 39.696, "grad_norm": 14.038163185119629, "learning_rate": 3.353333333333333e-05, "loss": 1.1766, "step": 4962 }, { "epoch": 39.704, "grad_norm": 20.922603607177734, "learning_rate": 3.3528888888888895e-05, "loss": 1.1326, "step": 4963 }, { "epoch": 39.712, "grad_norm": 33.8702392578125, "learning_rate": 3.352444444444445e-05, "loss": 1.4409, "step": 4964 }, { "epoch": 39.72, "grad_norm": 35.55323791503906, "learning_rate": 3.3520000000000004e-05, "loss": 1.4731, "step": 4965 }, { "epoch": 39.728, "grad_norm": 53.38612747192383, "learning_rate": 3.351555555555555e-05, "loss": 1.2693, "step": 4966 }, { "epoch": 39.736, "grad_norm": 41.35847091674805, "learning_rate": 3.3511111111111114e-05, "loss": 1.0479, "step": 4967 }, { "epoch": 39.744, "grad_norm": 21.86907958984375, "learning_rate": 3.350666666666667e-05, "loss": 1.3837, "step": 4968 }, { "epoch": 39.752, "grad_norm": 25.997045516967773, "learning_rate": 3.3502222222222224e-05, "loss": 1.4952, "step": 4969 }, { "epoch": 39.76, "grad_norm": 26.483430862426758, "learning_rate": 3.349777777777778e-05, "loss": 1.1373, "step": 4970 }, { "epoch": 39.768, "grad_norm": 17.647260665893555, "learning_rate": 3.349333333333334e-05, "loss": 1.1896, "step": 4971 }, { "epoch": 39.776, "grad_norm": 24.533611297607422, "learning_rate": 3.3488888888888895e-05, "loss": 1.2523, "step": 4972 }, { "epoch": 39.784, "grad_norm": 45.2606086730957, "learning_rate": 3.348444444444444e-05, "loss": 1.5175, "step": 4973 }, { "epoch": 39.792, "grad_norm": 22.900503158569336, "learning_rate": 3.348e-05, "loss": 1.1273, "step": 4974 }, { "epoch": 39.8, "grad_norm": 36.34379577636719, "learning_rate": 3.347555555555555e-05, "loss": 1.4203, "step": 4975 }, { "epoch": 39.808, "grad_norm": 22.697769165039062, 
"learning_rate": 3.3471111111111114e-05, "loss": 0.9954, "step": 4976 }, { "epoch": 39.816, "grad_norm": 22.26141357421875, "learning_rate": 3.346666666666667e-05, "loss": 1.0478, "step": 4977 }, { "epoch": 39.824, "grad_norm": 111.2901840209961, "learning_rate": 3.3462222222222224e-05, "loss": 1.251, "step": 4978 }, { "epoch": 39.832, "grad_norm": 68.60845947265625, "learning_rate": 3.345777777777778e-05, "loss": 1.8901, "step": 4979 }, { "epoch": 39.84, "grad_norm": 26.07238006591797, "learning_rate": 3.3453333333333334e-05, "loss": 1.1788, "step": 4980 }, { "epoch": 39.848, "grad_norm": 15.762788772583008, "learning_rate": 3.344888888888889e-05, "loss": 0.9117, "step": 4981 }, { "epoch": 39.856, "grad_norm": 24.83635139465332, "learning_rate": 3.3444444444444443e-05, "loss": 1.3207, "step": 4982 }, { "epoch": 39.864, "grad_norm": 32.95915603637695, "learning_rate": 3.344e-05, "loss": 1.3305, "step": 4983 }, { "epoch": 39.872, "grad_norm": 34.54612731933594, "learning_rate": 3.343555555555556e-05, "loss": 1.1311, "step": 4984 }, { "epoch": 39.88, "grad_norm": 25.76846694946289, "learning_rate": 3.3431111111111115e-05, "loss": 1.2456, "step": 4985 }, { "epoch": 39.888, "grad_norm": 18.642169952392578, "learning_rate": 3.342666666666667e-05, "loss": 1.1511, "step": 4986 }, { "epoch": 39.896, "grad_norm": 47.99332809448242, "learning_rate": 3.3422222222222224e-05, "loss": 0.9773, "step": 4987 }, { "epoch": 39.904, "grad_norm": 44.236366271972656, "learning_rate": 3.341777777777778e-05, "loss": 1.2754, "step": 4988 }, { "epoch": 39.912, "grad_norm": 20.04837417602539, "learning_rate": 3.3413333333333334e-05, "loss": 0.7252, "step": 4989 }, { "epoch": 39.92, "grad_norm": 69.60343170166016, "learning_rate": 3.340888888888889e-05, "loss": 0.9977, "step": 4990 }, { "epoch": 39.928, "grad_norm": 43.190086364746094, "learning_rate": 3.3404444444444444e-05, "loss": 1.6309, "step": 4991 }, { "epoch": 39.936, "grad_norm": 80.01154327392578, "learning_rate": 
3.3400000000000005e-05, "loss": 0.9869, "step": 4992 }, { "epoch": 39.944, "grad_norm": 16.874876022338867, "learning_rate": 3.339555555555556e-05, "loss": 1.0226, "step": 4993 }, { "epoch": 39.952, "grad_norm": 28.514230728149414, "learning_rate": 3.3391111111111115e-05, "loss": 1.0718, "step": 4994 }, { "epoch": 39.96, "grad_norm": 38.5604248046875, "learning_rate": 3.338666666666666e-05, "loss": 0.9011, "step": 4995 }, { "epoch": 39.968, "grad_norm": 23.220964431762695, "learning_rate": 3.3382222222222225e-05, "loss": 1.1661, "step": 4996 }, { "epoch": 39.976, "grad_norm": 19.717893600463867, "learning_rate": 3.337777777777778e-05, "loss": 0.9902, "step": 4997 }, { "epoch": 39.984, "grad_norm": 22.375, "learning_rate": 3.3373333333333335e-05, "loss": 1.1759, "step": 4998 }, { "epoch": 39.992, "grad_norm": 30.85069465637207, "learning_rate": 3.336888888888889e-05, "loss": 1.2504, "step": 4999 }, { "epoch": 40.0, "grad_norm": 30.95490837097168, "learning_rate": 3.336444444444445e-05, "loss": 1.4063, "step": 5000 }, { "epoch": 40.0, "eval_loss": 1.1877496242523193, "eval_map": 0.3313, "eval_map_50": 0.6584, "eval_map_75": 0.3132, "eval_map_Coverall": 0.5987, "eval_map_Face_Shield": 0.3077, "eval_map_Gloves": 0.2525, "eval_map_Goggles": 0.098, "eval_map_Mask": 0.3996, "eval_map_large": 0.5795, "eval_map_medium": 0.1885, "eval_map_small": 0.2286, "eval_mar_1": 0.2833, "eval_mar_10": 0.5074, "eval_mar_100": 0.532, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.6471, "eval_mar_100_Gloves": 0.3967, "eval_mar_100_Goggles": 0.4031, "eval_mar_100_Mask": 0.4885, "eval_mar_large": 0.6835, "eval_mar_medium": 0.3836, "eval_mar_small": 0.3596, "eval_runtime": 0.9414, "eval_samples_per_second": 30.806, "eval_steps_per_second": 2.125, "step": 5000 }, { "epoch": 40.008, "grad_norm": 25.055927276611328, "learning_rate": 3.336e-05, "loss": 0.8126, "step": 5001 }, { "epoch": 40.016, "grad_norm": 23.217437744140625, "learning_rate": 3.3355555555555554e-05, "loss": 
0.8334, "step": 5002 }, { "epoch": 40.024, "grad_norm": 26.288850784301758, "learning_rate": 3.335111111111111e-05, "loss": 1.0298, "step": 5003 }, { "epoch": 40.032, "grad_norm": 29.847911834716797, "learning_rate": 3.334666666666667e-05, "loss": 1.4115, "step": 5004 }, { "epoch": 40.04, "grad_norm": 23.294422149658203, "learning_rate": 3.3342222222222225e-05, "loss": 1.1088, "step": 5005 }, { "epoch": 40.048, "grad_norm": 18.772459030151367, "learning_rate": 3.333777777777778e-05, "loss": 0.7683, "step": 5006 }, { "epoch": 40.056, "grad_norm": 22.693965911865234, "learning_rate": 3.3333333333333335e-05, "loss": 1.1812, "step": 5007 }, { "epoch": 40.064, "grad_norm": 24.164213180541992, "learning_rate": 3.332888888888889e-05, "loss": 1.2533, "step": 5008 }, { "epoch": 40.072, "grad_norm": 20.21295738220215, "learning_rate": 3.3324444444444445e-05, "loss": 1.1413, "step": 5009 }, { "epoch": 40.08, "grad_norm": 51.15434265136719, "learning_rate": 3.332e-05, "loss": 1.244, "step": 5010 }, { "epoch": 40.088, "grad_norm": 20.751995086669922, "learning_rate": 3.3315555555555554e-05, "loss": 1.1198, "step": 5011 }, { "epoch": 40.096, "grad_norm": 35.45619201660156, "learning_rate": 3.3311111111111116e-05, "loss": 1.2567, "step": 5012 }, { "epoch": 40.104, "grad_norm": 35.595096588134766, "learning_rate": 3.330666666666667e-05, "loss": 2.6227, "step": 5013 }, { "epoch": 40.112, "grad_norm": 29.332225799560547, "learning_rate": 3.3302222222222226e-05, "loss": 0.8844, "step": 5014 }, { "epoch": 40.12, "grad_norm": 22.911062240600586, "learning_rate": 3.329777777777778e-05, "loss": 0.9937, "step": 5015 }, { "epoch": 40.128, "grad_norm": 73.88036346435547, "learning_rate": 3.3293333333333335e-05, "loss": 0.9458, "step": 5016 }, { "epoch": 40.136, "grad_norm": 21.222158432006836, "learning_rate": 3.328888888888889e-05, "loss": 1.2703, "step": 5017 }, { "epoch": 40.144, "grad_norm": 22.687307357788086, "learning_rate": 3.3284444444444445e-05, "loss": 0.8221, "step": 5018 }, { 
"epoch": 40.152, "grad_norm": 14.984850883483887, "learning_rate": 3.328e-05, "loss": 1.0668, "step": 5019 }, { "epoch": 40.16, "grad_norm": 22.187477111816406, "learning_rate": 3.327555555555556e-05, "loss": 1.0832, "step": 5020 }, { "epoch": 40.168, "grad_norm": 20.662372589111328, "learning_rate": 3.3271111111111116e-05, "loss": 1.8959, "step": 5021 }, { "epoch": 40.176, "grad_norm": 11.4114408493042, "learning_rate": 3.326666666666667e-05, "loss": 0.9232, "step": 5022 }, { "epoch": 40.184, "grad_norm": 20.612180709838867, "learning_rate": 3.326222222222222e-05, "loss": 1.1535, "step": 5023 }, { "epoch": 40.192, "grad_norm": 25.15695571899414, "learning_rate": 3.325777777777778e-05, "loss": 1.2505, "step": 5024 }, { "epoch": 40.2, "grad_norm": 22.15402603149414, "learning_rate": 3.3253333333333336e-05, "loss": 1.0951, "step": 5025 }, { "epoch": 40.208, "grad_norm": 31.34897804260254, "learning_rate": 3.324888888888889e-05, "loss": 1.1539, "step": 5026 }, { "epoch": 40.216, "grad_norm": 19.997394561767578, "learning_rate": 3.3244444444444445e-05, "loss": 0.8015, "step": 5027 }, { "epoch": 40.224, "grad_norm": 24.481754302978516, "learning_rate": 3.324e-05, "loss": 0.9141, "step": 5028 }, { "epoch": 40.232, "grad_norm": 37.926666259765625, "learning_rate": 3.323555555555556e-05, "loss": 1.1231, "step": 5029 }, { "epoch": 40.24, "grad_norm": 21.782859802246094, "learning_rate": 3.323111111111111e-05, "loss": 1.2309, "step": 5030 }, { "epoch": 40.248, "grad_norm": 28.77414894104004, "learning_rate": 3.3226666666666665e-05, "loss": 1.101, "step": 5031 }, { "epoch": 40.256, "grad_norm": 30.41433334350586, "learning_rate": 3.322222222222222e-05, "loss": 1.0217, "step": 5032 }, { "epoch": 40.264, "grad_norm": 20.664283752441406, "learning_rate": 3.321777777777778e-05, "loss": 0.9273, "step": 5033 }, { "epoch": 40.272, "grad_norm": 28.392009735107422, "learning_rate": 3.3213333333333336e-05, "loss": 1.1611, "step": 5034 }, { "epoch": 40.28, "grad_norm": 
41.530921936035156, "learning_rate": 3.320888888888889e-05, "loss": 1.5966, "step": 5035 }, { "epoch": 40.288, "grad_norm": 23.54444122314453, "learning_rate": 3.3204444444444446e-05, "loss": 1.1857, "step": 5036 }, { "epoch": 40.296, "grad_norm": 36.145103454589844, "learning_rate": 3.32e-05, "loss": 1.1328, "step": 5037 }, { "epoch": 40.304, "grad_norm": 33.83261489868164, "learning_rate": 3.3195555555555555e-05, "loss": 1.1419, "step": 5038 }, { "epoch": 40.312, "grad_norm": 22.079429626464844, "learning_rate": 3.319111111111111e-05, "loss": 1.2629, "step": 5039 }, { "epoch": 40.32, "grad_norm": 29.163829803466797, "learning_rate": 3.3186666666666665e-05, "loss": 1.2337, "step": 5040 }, { "epoch": 40.328, "grad_norm": 23.330883026123047, "learning_rate": 3.318222222222223e-05, "loss": 1.1472, "step": 5041 }, { "epoch": 40.336, "grad_norm": 18.363529205322266, "learning_rate": 3.317777777777778e-05, "loss": 1.144, "step": 5042 }, { "epoch": 40.344, "grad_norm": 38.10127258300781, "learning_rate": 3.3173333333333336e-05, "loss": 0.9343, "step": 5043 }, { "epoch": 40.352, "grad_norm": 60.477142333984375, "learning_rate": 3.316888888888889e-05, "loss": 1.0396, "step": 5044 }, { "epoch": 40.36, "grad_norm": 96.84346008300781, "learning_rate": 3.3164444444444446e-05, "loss": 1.1958, "step": 5045 }, { "epoch": 40.368, "grad_norm": 83.41908264160156, "learning_rate": 3.316e-05, "loss": 0.8914, "step": 5046 }, { "epoch": 40.376, "grad_norm": 46.0658073425293, "learning_rate": 3.3155555555555556e-05, "loss": 0.9858, "step": 5047 }, { "epoch": 40.384, "grad_norm": 20.177274703979492, "learning_rate": 3.315111111111111e-05, "loss": 1.3128, "step": 5048 }, { "epoch": 40.392, "grad_norm": 38.66389465332031, "learning_rate": 3.314666666666667e-05, "loss": 1.3327, "step": 5049 }, { "epoch": 40.4, "grad_norm": 31.279178619384766, "learning_rate": 3.314222222222223e-05, "loss": 2.0924, "step": 5050 }, { "epoch": 40.408, "grad_norm": 23.351078033447266, "learning_rate": 
3.313777777777778e-05, "loss": 1.0907, "step": 5051 }, { "epoch": 40.416, "grad_norm": 45.16535186767578, "learning_rate": 3.313333333333333e-05, "loss": 1.5818, "step": 5052 }, { "epoch": 40.424, "grad_norm": 30.14053726196289, "learning_rate": 3.312888888888889e-05, "loss": 0.8231, "step": 5053 }, { "epoch": 40.432, "grad_norm": 77.50425720214844, "learning_rate": 3.3124444444444447e-05, "loss": 1.0957, "step": 5054 }, { "epoch": 40.44, "grad_norm": 17.583955764770508, "learning_rate": 3.312e-05, "loss": 1.3868, "step": 5055 }, { "epoch": 40.448, "grad_norm": 33.77965545654297, "learning_rate": 3.3115555555555556e-05, "loss": 1.1918, "step": 5056 }, { "epoch": 40.456, "grad_norm": 16.091882705688477, "learning_rate": 3.311111111111112e-05, "loss": 1.1586, "step": 5057 }, { "epoch": 40.464, "grad_norm": 21.36298179626465, "learning_rate": 3.3106666666666666e-05, "loss": 2.0113, "step": 5058 }, { "epoch": 40.472, "grad_norm": 28.972881317138672, "learning_rate": 3.310222222222222e-05, "loss": 1.0913, "step": 5059 }, { "epoch": 40.48, "grad_norm": 19.223581314086914, "learning_rate": 3.3097777777777776e-05, "loss": 1.5671, "step": 5060 }, { "epoch": 40.488, "grad_norm": 50.02836227416992, "learning_rate": 3.309333333333334e-05, "loss": 0.9904, "step": 5061 }, { "epoch": 40.496, "grad_norm": 53.3050651550293, "learning_rate": 3.308888888888889e-05, "loss": 1.0676, "step": 5062 }, { "epoch": 40.504, "grad_norm": 48.88274002075195, "learning_rate": 3.308444444444445e-05, "loss": 0.9352, "step": 5063 }, { "epoch": 40.512, "grad_norm": 20.7290096282959, "learning_rate": 3.308e-05, "loss": 1.5878, "step": 5064 }, { "epoch": 40.52, "grad_norm": 33.80016326904297, "learning_rate": 3.3075555555555557e-05, "loss": 1.2275, "step": 5065 }, { "epoch": 40.528, "grad_norm": 32.525543212890625, "learning_rate": 3.307111111111111e-05, "loss": 1.0526, "step": 5066 }, { "epoch": 40.536, "grad_norm": 32.06709289550781, "learning_rate": 3.3066666666666666e-05, "loss": 1.0782, "step": 
5067 }, { "epoch": 40.544, "grad_norm": 28.30950927734375, "learning_rate": 3.306222222222222e-05, "loss": 1.1029, "step": 5068 }, { "epoch": 40.552, "grad_norm": 19.420202255249023, "learning_rate": 3.305777777777778e-05, "loss": 1.2768, "step": 5069 }, { "epoch": 40.56, "grad_norm": 36.466426849365234, "learning_rate": 3.305333333333334e-05, "loss": 1.269, "step": 5070 }, { "epoch": 40.568, "grad_norm": 26.88462257385254, "learning_rate": 3.304888888888889e-05, "loss": 1.0675, "step": 5071 }, { "epoch": 40.576, "grad_norm": 19.6346435546875, "learning_rate": 3.304444444444445e-05, "loss": 1.1126, "step": 5072 }, { "epoch": 40.584, "grad_norm": 33.85456466674805, "learning_rate": 3.304e-05, "loss": 1.3994, "step": 5073 }, { "epoch": 40.592, "grad_norm": 29.196123123168945, "learning_rate": 3.303555555555556e-05, "loss": 2.4616, "step": 5074 }, { "epoch": 40.6, "grad_norm": 19.12900733947754, "learning_rate": 3.303111111111111e-05, "loss": 1.0055, "step": 5075 }, { "epoch": 40.608, "grad_norm": 23.093339920043945, "learning_rate": 3.302666666666667e-05, "loss": 0.9726, "step": 5076 }, { "epoch": 40.616, "grad_norm": 31.61528968811035, "learning_rate": 3.302222222222222e-05, "loss": 1.2187, "step": 5077 }, { "epoch": 40.624, "grad_norm": 14.759888648986816, "learning_rate": 3.301777777777778e-05, "loss": 0.691, "step": 5078 }, { "epoch": 40.632, "grad_norm": 28.84674072265625, "learning_rate": 3.301333333333334e-05, "loss": 0.9809, "step": 5079 }, { "epoch": 40.64, "grad_norm": 25.612266540527344, "learning_rate": 3.3008888888888886e-05, "loss": 1.0898, "step": 5080 }, { "epoch": 40.648, "grad_norm": 70.79100036621094, "learning_rate": 3.300444444444444e-05, "loss": 0.9413, "step": 5081 }, { "epoch": 40.656, "grad_norm": 38.25236511230469, "learning_rate": 3.3e-05, "loss": 1.0841, "step": 5082 }, { "epoch": 40.664, "grad_norm": 22.481298446655273, "learning_rate": 3.299555555555556e-05, "loss": 0.9267, "step": 5083 }, { "epoch": 40.672, "grad_norm": 
21.574434280395508, "learning_rate": 3.299111111111111e-05, "loss": 1.2117, "step": 5084 }, { "epoch": 40.68, "grad_norm": 28.403858184814453, "learning_rate": 3.298666666666667e-05, "loss": 0.9249, "step": 5085 }, { "epoch": 40.688, "grad_norm": 17.511051177978516, "learning_rate": 3.298222222222223e-05, "loss": 0.9892, "step": 5086 }, { "epoch": 40.696, "grad_norm": 21.712350845336914, "learning_rate": 3.297777777777778e-05, "loss": 1.0832, "step": 5087 }, { "epoch": 40.704, "grad_norm": 36.742374420166016, "learning_rate": 3.297333333333333e-05, "loss": 1.1027, "step": 5088 }, { "epoch": 40.712, "grad_norm": 22.15849494934082, "learning_rate": 3.2968888888888886e-05, "loss": 0.8768, "step": 5089 }, { "epoch": 40.72, "grad_norm": 32.384517669677734, "learning_rate": 3.296444444444445e-05, "loss": 0.7867, "step": 5090 }, { "epoch": 40.728, "grad_norm": 24.086294174194336, "learning_rate": 3.296e-05, "loss": 0.9959, "step": 5091 }, { "epoch": 40.736, "grad_norm": 24.210933685302734, "learning_rate": 3.295555555555556e-05, "loss": 1.0171, "step": 5092 }, { "epoch": 40.744, "grad_norm": 58.169158935546875, "learning_rate": 3.295111111111111e-05, "loss": 1.1591, "step": 5093 }, { "epoch": 40.752, "grad_norm": 36.50046157836914, "learning_rate": 3.294666666666667e-05, "loss": 1.1962, "step": 5094 }, { "epoch": 40.76, "grad_norm": 52.92119598388672, "learning_rate": 3.294222222222222e-05, "loss": 1.4474, "step": 5095 }, { "epoch": 40.768, "grad_norm": 23.820520401000977, "learning_rate": 3.293777777777778e-05, "loss": 1.4309, "step": 5096 }, { "epoch": 40.776, "grad_norm": 26.66062355041504, "learning_rate": 3.293333333333333e-05, "loss": 0.9002, "step": 5097 }, { "epoch": 40.784, "grad_norm": 25.46244239807129, "learning_rate": 3.2928888888888894e-05, "loss": 1.2764, "step": 5098 }, { "epoch": 40.792, "grad_norm": 85.10192108154297, "learning_rate": 3.292444444444445e-05, "loss": 1.4463, "step": 5099 }, { "epoch": 40.8, "grad_norm": 29.98067283630371, "learning_rate": 
3.292e-05, "loss": 1.0151, "step": 5100 }, { "epoch": 40.808, "grad_norm": 20.348865509033203, "learning_rate": 3.291555555555556e-05, "loss": 1.1408, "step": 5101 }, { "epoch": 40.816, "grad_norm": 46.668357849121094, "learning_rate": 3.291111111111111e-05, "loss": 1.22, "step": 5102 }, { "epoch": 40.824, "grad_norm": 27.671894073486328, "learning_rate": 3.290666666666667e-05, "loss": 1.1283, "step": 5103 }, { "epoch": 40.832, "grad_norm": 32.16703796386719, "learning_rate": 3.290222222222222e-05, "loss": 1.1655, "step": 5104 }, { "epoch": 40.84, "grad_norm": 46.03282928466797, "learning_rate": 3.289777777777778e-05, "loss": 1.0788, "step": 5105 }, { "epoch": 40.848, "grad_norm": 40.96993637084961, "learning_rate": 3.289333333333334e-05, "loss": 1.1524, "step": 5106 }, { "epoch": 40.856, "grad_norm": 95.71407318115234, "learning_rate": 3.2888888888888894e-05, "loss": 0.8536, "step": 5107 }, { "epoch": 40.864, "grad_norm": 38.55078887939453, "learning_rate": 3.288444444444445e-05, "loss": 1.3478, "step": 5108 }, { "epoch": 40.872, "grad_norm": 34.439613342285156, "learning_rate": 3.288e-05, "loss": 0.9639, "step": 5109 }, { "epoch": 40.88, "grad_norm": 26.92691421508789, "learning_rate": 3.287555555555556e-05, "loss": 1.0708, "step": 5110 }, { "epoch": 40.888, "grad_norm": 73.86912536621094, "learning_rate": 3.287111111111111e-05, "loss": 1.2203, "step": 5111 }, { "epoch": 40.896, "grad_norm": 30.48409652709961, "learning_rate": 3.286666666666667e-05, "loss": 0.7038, "step": 5112 }, { "epoch": 40.904, "grad_norm": 23.79740333557129, "learning_rate": 3.286222222222222e-05, "loss": 1.0108, "step": 5113 }, { "epoch": 40.912, "grad_norm": 23.05986976623535, "learning_rate": 3.2857777777777785e-05, "loss": 1.367, "step": 5114 }, { "epoch": 40.92, "grad_norm": 37.931636810302734, "learning_rate": 3.285333333333333e-05, "loss": 1.3299, "step": 5115 }, { "epoch": 40.928, "grad_norm": 28.910680770874023, "learning_rate": 3.284888888888889e-05, "loss": 1.1606, "step": 5116 
}, { "epoch": 40.936, "grad_norm": 26.469022750854492, "learning_rate": 3.284444444444444e-05, "loss": 1.4297, "step": 5117 }, { "epoch": 40.944, "grad_norm": 20.318283081054688, "learning_rate": 3.2840000000000004e-05, "loss": 0.7689, "step": 5118 }, { "epoch": 40.952, "grad_norm": 16.532373428344727, "learning_rate": 3.283555555555556e-05, "loss": 1.8134, "step": 5119 }, { "epoch": 40.96, "grad_norm": 87.71897888183594, "learning_rate": 3.2831111111111114e-05, "loss": 1.2493, "step": 5120 }, { "epoch": 40.968, "grad_norm": 21.501447677612305, "learning_rate": 3.282666666666667e-05, "loss": 1.1614, "step": 5121 }, { "epoch": 40.976, "grad_norm": 26.547033309936523, "learning_rate": 3.2822222222222223e-05, "loss": 1.4146, "step": 5122 }, { "epoch": 40.984, "grad_norm": 51.68418884277344, "learning_rate": 3.281777777777778e-05, "loss": 1.1464, "step": 5123 }, { "epoch": 40.992, "grad_norm": 66.64419555664062, "learning_rate": 3.281333333333333e-05, "loss": 2.1196, "step": 5124 }, { "epoch": 41.0, "grad_norm": 21.662084579467773, "learning_rate": 3.280888888888889e-05, "loss": 1.1642, "step": 5125 }, { "epoch": 41.0, "eval_loss": 1.1853598356246948, "eval_map": 0.3624, "eval_map_50": 0.702, "eval_map_75": 0.3746, "eval_map_Coverall": 0.5682, "eval_map_Face_Shield": 0.4532, "eval_map_Gloves": 0.2188, "eval_map_Goggles": 0.2323, "eval_map_Mask": 0.3395, "eval_map_large": 0.5407, "eval_map_medium": 0.2522, "eval_map_small": 0.1667, "eval_mar_1": 0.2759, "eval_mar_10": 0.5016, "eval_mar_100": 0.5217, "eval_mar_100_Coverall": 0.7178, "eval_mar_100_Face_Shield": 0.6529, "eval_mar_100_Gloves": 0.3607, "eval_mar_100_Goggles": 0.45, "eval_mar_100_Mask": 0.4269, "eval_mar_large": 0.6436, "eval_mar_medium": 0.4094, "eval_mar_small": 0.2719, "eval_runtime": 0.9291, "eval_samples_per_second": 31.212, "eval_steps_per_second": 2.153, "step": 5125 }, { "epoch": 41.008, "grad_norm": 28.40699577331543, "learning_rate": 3.280444444444445e-05, "loss": 1.0725, "step": 5126 }, { "epoch": 
41.016, "grad_norm": 43.049476623535156, "learning_rate": 3.2800000000000004e-05, "loss": 0.93, "step": 5127 }, { "epoch": 41.024, "grad_norm": 31.15970802307129, "learning_rate": 3.279555555555556e-05, "loss": 1.2098, "step": 5128 }, { "epoch": 41.032, "grad_norm": 37.004451751708984, "learning_rate": 3.2791111111111114e-05, "loss": 1.6271, "step": 5129 }, { "epoch": 41.04, "grad_norm": 19.393882751464844, "learning_rate": 3.278666666666666e-05, "loss": 1.0387, "step": 5130 }, { "epoch": 41.048, "grad_norm": 25.104766845703125, "learning_rate": 3.2782222222222224e-05, "loss": 1.4258, "step": 5131 }, { "epoch": 41.056, "grad_norm": 26.042966842651367, "learning_rate": 3.277777777777778e-05, "loss": 1.1405, "step": 5132 }, { "epoch": 41.064, "grad_norm": 47.20396041870117, "learning_rate": 3.2773333333333334e-05, "loss": 1.4026, "step": 5133 }, { "epoch": 41.072, "grad_norm": 70.71305847167969, "learning_rate": 3.276888888888889e-05, "loss": 1.0102, "step": 5134 }, { "epoch": 41.08, "grad_norm": 24.483959197998047, "learning_rate": 3.276444444444445e-05, "loss": 0.7685, "step": 5135 }, { "epoch": 41.088, "grad_norm": 25.04202651977539, "learning_rate": 3.2760000000000005e-05, "loss": 1.0075, "step": 5136 }, { "epoch": 41.096, "grad_norm": 17.24791717529297, "learning_rate": 3.275555555555555e-05, "loss": 0.8904, "step": 5137 }, { "epoch": 41.104, "grad_norm": 27.731021881103516, "learning_rate": 3.275111111111111e-05, "loss": 1.1097, "step": 5138 }, { "epoch": 41.112, "grad_norm": 17.849628448486328, "learning_rate": 3.274666666666667e-05, "loss": 0.9525, "step": 5139 }, { "epoch": 41.12, "grad_norm": 52.290340423583984, "learning_rate": 3.2742222222222224e-05, "loss": 0.7655, "step": 5140 }, { "epoch": 41.128, "grad_norm": 44.679141998291016, "learning_rate": 3.273777777777778e-05, "loss": 0.8424, "step": 5141 }, { "epoch": 41.136, "grad_norm": 19.92893409729004, "learning_rate": 3.2733333333333334e-05, "loss": 1.0424, "step": 5142 }, { "epoch": 41.144, 
"grad_norm": 28.984560012817383, "learning_rate": 3.2728888888888896e-05, "loss": 1.0572, "step": 5143 }, { "epoch": 41.152, "grad_norm": 27.561769485473633, "learning_rate": 3.2724444444444444e-05, "loss": 1.4408, "step": 5144 }, { "epoch": 41.16, "grad_norm": 18.60086441040039, "learning_rate": 3.272e-05, "loss": 1.1071, "step": 5145 }, { "epoch": 41.168, "grad_norm": 34.95810317993164, "learning_rate": 3.271555555555555e-05, "loss": 1.1209, "step": 5146 }, { "epoch": 41.176, "grad_norm": 40.129188537597656, "learning_rate": 3.2711111111111115e-05, "loss": 1.09, "step": 5147 }, { "epoch": 41.184, "grad_norm": 41.73360824584961, "learning_rate": 3.270666666666667e-05, "loss": 0.985, "step": 5148 }, { "epoch": 41.192, "grad_norm": 18.085140228271484, "learning_rate": 3.2702222222222225e-05, "loss": 1.0454, "step": 5149 }, { "epoch": 41.2, "grad_norm": 29.93191146850586, "learning_rate": 3.269777777777778e-05, "loss": 1.2552, "step": 5150 }, { "epoch": 41.208, "grad_norm": 33.27558517456055, "learning_rate": 3.2693333333333334e-05, "loss": 1.2429, "step": 5151 }, { "epoch": 41.216, "grad_norm": 88.79499053955078, "learning_rate": 3.268888888888889e-05, "loss": 0.9583, "step": 5152 }, { "epoch": 41.224, "grad_norm": 28.694944381713867, "learning_rate": 3.2684444444444444e-05, "loss": 1.6276, "step": 5153 }, { "epoch": 41.232, "grad_norm": 34.305973052978516, "learning_rate": 3.268e-05, "loss": 1.1703, "step": 5154 }, { "epoch": 41.24, "grad_norm": 28.068666458129883, "learning_rate": 3.267555555555556e-05, "loss": 0.975, "step": 5155 }, { "epoch": 41.248, "grad_norm": 20.79388999938965, "learning_rate": 3.2671111111111115e-05, "loss": 0.7354, "step": 5156 }, { "epoch": 41.256, "grad_norm": 25.072317123413086, "learning_rate": 3.266666666666667e-05, "loss": 0.8807, "step": 5157 }, { "epoch": 41.264, "grad_norm": 31.1124210357666, "learning_rate": 3.2662222222222225e-05, "loss": 0.8473, "step": 5158 }, { "epoch": 41.272, "grad_norm": 20.18535614013672, "learning_rate": 
3.265777777777778e-05, "loss": 2.569, "step": 5159 }, { "epoch": 41.28, "grad_norm": 28.633100509643555, "learning_rate": 3.2653333333333335e-05, "loss": 1.2805, "step": 5160 }, { "epoch": 41.288, "grad_norm": 37.053077697753906, "learning_rate": 3.264888888888889e-05, "loss": 0.8351, "step": 5161 }, { "epoch": 41.296, "grad_norm": 17.691497802734375, "learning_rate": 3.2644444444444444e-05, "loss": 0.7107, "step": 5162 }, { "epoch": 41.304, "grad_norm": 22.200395584106445, "learning_rate": 3.2640000000000006e-05, "loss": 1.0609, "step": 5163 }, { "epoch": 41.312, "grad_norm": 33.68181228637695, "learning_rate": 3.263555555555556e-05, "loss": 0.8755, "step": 5164 }, { "epoch": 41.32, "grad_norm": 85.25009155273438, "learning_rate": 3.2631111111111116e-05, "loss": 1.0839, "step": 5165 }, { "epoch": 41.328, "grad_norm": 14.37791919708252, "learning_rate": 3.2626666666666664e-05, "loss": 1.2473, "step": 5166 }, { "epoch": 41.336, "grad_norm": 46.287662506103516, "learning_rate": 3.2622222222222225e-05, "loss": 0.9697, "step": 5167 }, { "epoch": 41.344, "grad_norm": 41.07746124267578, "learning_rate": 3.261777777777778e-05, "loss": 1.2987, "step": 5168 }, { "epoch": 41.352, "grad_norm": 19.627714157104492, "learning_rate": 3.2613333333333335e-05, "loss": 1.0985, "step": 5169 }, { "epoch": 41.36, "grad_norm": 28.719314575195312, "learning_rate": 3.260888888888889e-05, "loss": 1.2029, "step": 5170 }, { "epoch": 41.368, "grad_norm": 66.08983612060547, "learning_rate": 3.260444444444445e-05, "loss": 3.1214, "step": 5171 }, { "epoch": 41.376, "grad_norm": 32.860511779785156, "learning_rate": 3.26e-05, "loss": 1.0199, "step": 5172 }, { "epoch": 41.384, "grad_norm": 49.54412841796875, "learning_rate": 3.2595555555555554e-05, "loss": 1.5279, "step": 5173 }, { "epoch": 41.392, "grad_norm": 24.069931030273438, "learning_rate": 3.259111111111111e-05, "loss": 0.87, "step": 5174 }, { "epoch": 41.4, "grad_norm": 28.40826988220215, "learning_rate": 3.258666666666667e-05, "loss": 
1.621, "step": 5175 }, { "epoch": 41.408, "grad_norm": 34.18496322631836, "learning_rate": 3.2582222222222226e-05, "loss": 1.4017, "step": 5176 }, { "epoch": 41.416, "grad_norm": 21.934635162353516, "learning_rate": 3.257777777777778e-05, "loss": 1.5441, "step": 5177 }, { "epoch": 41.424, "grad_norm": 18.32565689086914, "learning_rate": 3.2573333333333335e-05, "loss": 1.2539, "step": 5178 }, { "epoch": 41.432, "grad_norm": 20.51233673095703, "learning_rate": 3.256888888888889e-05, "loss": 1.0817, "step": 5179 }, { "epoch": 41.44, "grad_norm": 43.51518249511719, "learning_rate": 3.2564444444444445e-05, "loss": 0.9213, "step": 5180 }, { "epoch": 41.448, "grad_norm": 17.641868591308594, "learning_rate": 3.256e-05, "loss": 1.2888, "step": 5181 }, { "epoch": 41.456, "grad_norm": 22.874053955078125, "learning_rate": 3.2555555555555555e-05, "loss": 0.9866, "step": 5182 }, { "epoch": 41.464, "grad_norm": 20.223499298095703, "learning_rate": 3.255111111111111e-05, "loss": 1.3627, "step": 5183 }, { "epoch": 41.472, "grad_norm": 16.17841911315918, "learning_rate": 3.254666666666667e-05, "loss": 0.8204, "step": 5184 }, { "epoch": 41.48, "grad_norm": 19.03162384033203, "learning_rate": 3.2542222222222226e-05, "loss": 1.1428, "step": 5185 }, { "epoch": 41.488, "grad_norm": 67.88591003417969, "learning_rate": 3.253777777777778e-05, "loss": 1.0562, "step": 5186 }, { "epoch": 41.496, "grad_norm": 17.896629333496094, "learning_rate": 3.253333333333333e-05, "loss": 0.9382, "step": 5187 }, { "epoch": 41.504, "grad_norm": 27.984081268310547, "learning_rate": 3.252888888888889e-05, "loss": 1.097, "step": 5188 }, { "epoch": 41.512, "grad_norm": 39.60560607910156, "learning_rate": 3.2524444444444446e-05, "loss": 1.2013, "step": 5189 }, { "epoch": 41.52, "grad_norm": 31.670303344726562, "learning_rate": 3.252e-05, "loss": 1.6008, "step": 5190 }, { "epoch": 41.528, "grad_norm": 195.5677490234375, "learning_rate": 3.2515555555555555e-05, "loss": 1.26, "step": 5191 }, { "epoch": 41.536, 
"grad_norm": 19.10936164855957, "learning_rate": 3.251111111111112e-05, "loss": 1.3818, "step": 5192 }, { "epoch": 41.544, "grad_norm": 15.091988563537598, "learning_rate": 3.250666666666667e-05, "loss": 1.2179, "step": 5193 }, { "epoch": 41.552, "grad_norm": 26.08637237548828, "learning_rate": 3.250222222222222e-05, "loss": 1.1558, "step": 5194 }, { "epoch": 41.56, "grad_norm": 25.402572631835938, "learning_rate": 3.2497777777777775e-05, "loss": 0.9177, "step": 5195 }, { "epoch": 41.568, "grad_norm": 28.098865509033203, "learning_rate": 3.2493333333333336e-05, "loss": 0.9501, "step": 5196 }, { "epoch": 41.576, "grad_norm": 34.36576843261719, "learning_rate": 3.248888888888889e-05, "loss": 0.9149, "step": 5197 }, { "epoch": 41.584, "grad_norm": 16.719985961914062, "learning_rate": 3.2484444444444446e-05, "loss": 0.8815, "step": 5198 }, { "epoch": 41.592, "grad_norm": 23.18143081665039, "learning_rate": 3.248e-05, "loss": 0.9212, "step": 5199 }, { "epoch": 41.6, "grad_norm": 37.26943588256836, "learning_rate": 3.247555555555556e-05, "loss": 1.0814, "step": 5200 }, { "epoch": 41.608, "grad_norm": 27.543588638305664, "learning_rate": 3.247111111111111e-05, "loss": 0.7502, "step": 5201 }, { "epoch": 41.616, "grad_norm": 45.09223937988281, "learning_rate": 3.2466666666666665e-05, "loss": 1.1488, "step": 5202 }, { "epoch": 41.624, "grad_norm": 121.68880462646484, "learning_rate": 3.246222222222222e-05, "loss": 1.1565, "step": 5203 }, { "epoch": 41.632, "grad_norm": 45.20256042480469, "learning_rate": 3.245777777777778e-05, "loss": 1.3693, "step": 5204 }, { "epoch": 41.64, "grad_norm": 33.163414001464844, "learning_rate": 3.2453333333333337e-05, "loss": 1.7319, "step": 5205 }, { "epoch": 41.648, "grad_norm": 47.38608932495117, "learning_rate": 3.244888888888889e-05, "loss": 1.3971, "step": 5206 }, { "epoch": 41.656, "grad_norm": 39.316959381103516, "learning_rate": 3.2444444444444446e-05, "loss": 1.4257, "step": 5207 }, { "epoch": 41.664, "grad_norm": 62.799041748046875, 
"learning_rate": 3.244e-05, "loss": 1.4286, "step": 5208 }, { "epoch": 41.672, "grad_norm": 34.16171646118164, "learning_rate": 3.2435555555555556e-05, "loss": 0.7869, "step": 5209 }, { "epoch": 41.68, "grad_norm": 50.7584114074707, "learning_rate": 3.243111111111111e-05, "loss": 1.1232, "step": 5210 }, { "epoch": 41.688, "grad_norm": 45.00275421142578, "learning_rate": 3.2426666666666666e-05, "loss": 0.774, "step": 5211 }, { "epoch": 41.696, "grad_norm": 60.90382385253906, "learning_rate": 3.242222222222223e-05, "loss": 1.1285, "step": 5212 }, { "epoch": 41.704, "grad_norm": 21.45298957824707, "learning_rate": 3.241777777777778e-05, "loss": 0.7778, "step": 5213 }, { "epoch": 41.712, "grad_norm": 21.255842208862305, "learning_rate": 3.241333333333334e-05, "loss": 1.353, "step": 5214 }, { "epoch": 41.72, "grad_norm": 20.05939292907715, "learning_rate": 3.240888888888889e-05, "loss": 1.1014, "step": 5215 }, { "epoch": 41.728, "grad_norm": 24.197246551513672, "learning_rate": 3.240444444444445e-05, "loss": 0.7977, "step": 5216 }, { "epoch": 41.736, "grad_norm": 38.05976486206055, "learning_rate": 3.24e-05, "loss": 1.1919, "step": 5217 }, { "epoch": 41.744, "grad_norm": 37.917781829833984, "learning_rate": 3.2395555555555556e-05, "loss": 0.7396, "step": 5218 }, { "epoch": 41.752, "grad_norm": 18.463293075561523, "learning_rate": 3.239111111111111e-05, "loss": 1.179, "step": 5219 }, { "epoch": 41.76, "grad_norm": 23.355371475219727, "learning_rate": 3.238666666666667e-05, "loss": 1.079, "step": 5220 }, { "epoch": 41.768, "grad_norm": 18.33203125, "learning_rate": 3.238222222222223e-05, "loss": 0.6557, "step": 5221 }, { "epoch": 41.776, "grad_norm": 51.20957946777344, "learning_rate": 3.2377777777777776e-05, "loss": 1.1611, "step": 5222 }, { "epoch": 41.784, "grad_norm": 22.173154830932617, "learning_rate": 3.237333333333333e-05, "loss": 1.0425, "step": 5223 }, { "epoch": 41.792, "grad_norm": 28.222164154052734, "learning_rate": 3.236888888888889e-05, "loss": 1.9052, 
"step": 5224 }, { "epoch": 41.8, "grad_norm": 17.514265060424805, "learning_rate": 3.236444444444445e-05, "loss": 1.1184, "step": 5225 }, { "epoch": 41.808, "grad_norm": 18.238510131835938, "learning_rate": 3.236e-05, "loss": 0.8219, "step": 5226 }, { "epoch": 41.816, "grad_norm": 13.175650596618652, "learning_rate": 3.235555555555556e-05, "loss": 1.145, "step": 5227 }, { "epoch": 41.824, "grad_norm": 29.204261779785156, "learning_rate": 3.235111111111112e-05, "loss": 1.5206, "step": 5228 }, { "epoch": 41.832, "grad_norm": 34.49699020385742, "learning_rate": 3.2346666666666666e-05, "loss": 0.9124, "step": 5229 }, { "epoch": 41.84, "grad_norm": 42.661380767822266, "learning_rate": 3.234222222222222e-05, "loss": 1.9039, "step": 5230 }, { "epoch": 41.848, "grad_norm": 20.246925354003906, "learning_rate": 3.2337777777777776e-05, "loss": 0.9248, "step": 5231 }, { "epoch": 41.856, "grad_norm": 15.71491527557373, "learning_rate": 3.233333333333333e-05, "loss": 0.8823, "step": 5232 }, { "epoch": 41.864, "grad_norm": 18.566699981689453, "learning_rate": 3.232888888888889e-05, "loss": 0.8434, "step": 5233 }, { "epoch": 41.872, "grad_norm": 16.959238052368164, "learning_rate": 3.232444444444445e-05, "loss": 1.1742, "step": 5234 }, { "epoch": 41.88, "grad_norm": 32.25905227661133, "learning_rate": 3.232e-05, "loss": 1.2116, "step": 5235 }, { "epoch": 41.888, "grad_norm": 16.052650451660156, "learning_rate": 3.231555555555556e-05, "loss": 1.1143, "step": 5236 }, { "epoch": 41.896, "grad_norm": 18.827301025390625, "learning_rate": 3.231111111111111e-05, "loss": 0.9372, "step": 5237 }, { "epoch": 41.904, "grad_norm": 16.343265533447266, "learning_rate": 3.230666666666667e-05, "loss": 0.8357, "step": 5238 }, { "epoch": 41.912, "grad_norm": 21.824909210205078, "learning_rate": 3.230222222222222e-05, "loss": 1.3297, "step": 5239 }, { "epoch": 41.92, "grad_norm": 23.63535499572754, "learning_rate": 3.2297777777777776e-05, "loss": 0.9105, "step": 5240 }, { "epoch": 41.928, 
"grad_norm": 15.624022483825684, "learning_rate": 3.229333333333334e-05, "loss": 1.3617, "step": 5241 }, { "epoch": 41.936, "grad_norm": 48.77760314941406, "learning_rate": 3.228888888888889e-05, "loss": 1.1575, "step": 5242 }, { "epoch": 41.944, "grad_norm": 18.36016082763672, "learning_rate": 3.228444444444445e-05, "loss": 1.3544, "step": 5243 }, { "epoch": 41.952, "grad_norm": 14.683088302612305, "learning_rate": 3.2279999999999996e-05, "loss": 0.9787, "step": 5244 }, { "epoch": 41.96, "grad_norm": 13.905756950378418, "learning_rate": 3.227555555555556e-05, "loss": 1.9926, "step": 5245 }, { "epoch": 41.968, "grad_norm": 18.38192367553711, "learning_rate": 3.227111111111111e-05, "loss": 0.9649, "step": 5246 }, { "epoch": 41.976, "grad_norm": 46.890419006347656, "learning_rate": 3.226666666666667e-05, "loss": 0.969, "step": 5247 }, { "epoch": 41.984, "grad_norm": 40.23917770385742, "learning_rate": 3.226222222222222e-05, "loss": 0.893, "step": 5248 }, { "epoch": 41.992, "grad_norm": 52.05929946899414, "learning_rate": 3.2257777777777784e-05, "loss": 1.3035, "step": 5249 }, { "epoch": 42.0, "grad_norm": 20.164508819580078, "learning_rate": 3.225333333333334e-05, "loss": 1.1073, "step": 5250 }, { "epoch": 42.0, "eval_loss": 1.167248249053955, "eval_map": 0.3453, "eval_map_50": 0.7115, "eval_map_75": 0.3339, "eval_map_Coverall": 0.5582, "eval_map_Face_Shield": 0.4002, "eval_map_Gloves": 0.2445, "eval_map_Goggles": 0.1881, "eval_map_Mask": 0.3354, "eval_map_large": 0.5861, "eval_map_medium": 0.2296, "eval_map_small": 0.1621, "eval_mar_1": 0.2937, "eval_mar_10": 0.4752, "eval_mar_100": 0.482, "eval_mar_100_Coverall": 0.72, "eval_mar_100_Face_Shield": 0.5, "eval_mar_100_Gloves": 0.3836, "eval_mar_100_Goggles": 0.3812, "eval_mar_100_Mask": 0.425, "eval_mar_large": 0.681, "eval_mar_medium": 0.3383, "eval_mar_small": 0.1952, "eval_runtime": 0.9214, "eval_samples_per_second": 31.473, "eval_steps_per_second": 2.171, "step": 5250 }, { "epoch": 42.008, "grad_norm": 
39.224910736083984, "learning_rate": 3.2248888888888887e-05, "loss": 1.9584, "step": 5251 }, { "epoch": 42.016, "grad_norm": 22.911781311035156, "learning_rate": 3.224444444444444e-05, "loss": 0.8227, "step": 5252 }, { "epoch": 42.024, "grad_norm": 13.494400978088379, "learning_rate": 3.224e-05, "loss": 1.2191, "step": 5253 }, { "epoch": 42.032, "grad_norm": 42.54161834716797, "learning_rate": 3.223555555555556e-05, "loss": 1.0934, "step": 5254 }, { "epoch": 42.04, "grad_norm": 12.57274341583252, "learning_rate": 3.223111111111111e-05, "loss": 1.1195, "step": 5255 }, { "epoch": 42.048, "grad_norm": 14.891883850097656, "learning_rate": 3.222666666666667e-05, "loss": 1.296, "step": 5256 }, { "epoch": 42.056, "grad_norm": 43.855289459228516, "learning_rate": 3.222222222222223e-05, "loss": 1.8437, "step": 5257 }, { "epoch": 42.064, "grad_norm": 25.810993194580078, "learning_rate": 3.221777777777778e-05, "loss": 0.9425, "step": 5258 }, { "epoch": 42.072, "grad_norm": 55.774044036865234, "learning_rate": 3.221333333333333e-05, "loss": 1.2525, "step": 5259 }, { "epoch": 42.08, "grad_norm": 14.838007926940918, "learning_rate": 3.220888888888889e-05, "loss": 0.7277, "step": 5260 }, { "epoch": 42.088, "grad_norm": 22.971574783325195, "learning_rate": 3.220444444444445e-05, "loss": 2.7987, "step": 5261 }, { "epoch": 42.096, "grad_norm": 34.97666549682617, "learning_rate": 3.2200000000000003e-05, "loss": 1.1974, "step": 5262 }, { "epoch": 42.104, "grad_norm": 11.708389282226562, "learning_rate": 3.219555555555556e-05, "loss": 1.023, "step": 5263 }, { "epoch": 42.112, "grad_norm": 24.53336524963379, "learning_rate": 3.219111111111111e-05, "loss": 0.9194, "step": 5264 }, { "epoch": 42.12, "grad_norm": 40.16780471801758, "learning_rate": 3.218666666666667e-05, "loss": 1.2352, "step": 5265 }, { "epoch": 42.128, "grad_norm": 26.02060317993164, "learning_rate": 3.218222222222222e-05, "loss": 0.9092, "step": 5266 }, { "epoch": 42.136, "grad_norm": 23.244722366333008, "learning_rate": 
3.217777777777778e-05, "loss": 1.0155, "step": 5267 }, { "epoch": 42.144, "grad_norm": 29.346050262451172, "learning_rate": 3.217333333333333e-05, "loss": 1.1333, "step": 5268 }, { "epoch": 42.152, "grad_norm": 22.47934341430664, "learning_rate": 3.2168888888888894e-05, "loss": 0.7826, "step": 5269 }, { "epoch": 42.16, "grad_norm": 22.24923324584961, "learning_rate": 3.216444444444445e-05, "loss": 1.2487, "step": 5270 }, { "epoch": 42.168, "grad_norm": 45.917118072509766, "learning_rate": 3.2160000000000004e-05, "loss": 1.4909, "step": 5271 }, { "epoch": 42.176, "grad_norm": 24.130596160888672, "learning_rate": 3.215555555555556e-05, "loss": 0.8673, "step": 5272 }, { "epoch": 42.184, "grad_norm": 20.490774154663086, "learning_rate": 3.2151111111111113e-05, "loss": 1.371, "step": 5273 }, { "epoch": 42.192, "grad_norm": 85.77549743652344, "learning_rate": 3.214666666666667e-05, "loss": 1.3339, "step": 5274 }, { "epoch": 42.2, "grad_norm": 59.64024353027344, "learning_rate": 3.214222222222222e-05, "loss": 1.0575, "step": 5275 }, { "epoch": 42.208, "grad_norm": 40.237552642822266, "learning_rate": 3.213777777777778e-05, "loss": 1.3569, "step": 5276 }, { "epoch": 42.216, "grad_norm": 56.47239303588867, "learning_rate": 3.213333333333334e-05, "loss": 0.9919, "step": 5277 }, { "epoch": 42.224, "grad_norm": 37.80424880981445, "learning_rate": 3.2128888888888894e-05, "loss": 1.1696, "step": 5278 }, { "epoch": 42.232, "grad_norm": 19.205533981323242, "learning_rate": 3.212444444444444e-05, "loss": 1.1923, "step": 5279 }, { "epoch": 42.24, "grad_norm": 15.776459693908691, "learning_rate": 3.212e-05, "loss": 1.6678, "step": 5280 }, { "epoch": 42.248, "grad_norm": 50.047786712646484, "learning_rate": 3.211555555555556e-05, "loss": 1.3104, "step": 5281 }, { "epoch": 42.256, "grad_norm": 17.85967445373535, "learning_rate": 3.2111111111111114e-05, "loss": 1.0728, "step": 5282 }, { "epoch": 42.264, "grad_norm": 15.334165573120117, "learning_rate": 3.210666666666667e-05, "loss": 
1.0969, "step": 5283 }, { "epoch": 42.272, "grad_norm": 26.91650390625, "learning_rate": 3.2102222222222224e-05, "loss": 1.4297, "step": 5284 }, { "epoch": 42.28, "grad_norm": 24.652427673339844, "learning_rate": 3.209777777777778e-05, "loss": 2.0061, "step": 5285 }, { "epoch": 42.288, "grad_norm": 37.276058197021484, "learning_rate": 3.209333333333333e-05, "loss": 0.8867, "step": 5286 }, { "epoch": 42.296, "grad_norm": 83.07022094726562, "learning_rate": 3.208888888888889e-05, "loss": 1.023, "step": 5287 }, { "epoch": 42.304, "grad_norm": 117.62577819824219, "learning_rate": 3.208444444444444e-05, "loss": 0.8124, "step": 5288 }, { "epoch": 42.312, "grad_norm": 50.758182525634766, "learning_rate": 3.208e-05, "loss": 1.092, "step": 5289 }, { "epoch": 42.32, "grad_norm": 22.061260223388672, "learning_rate": 3.207555555555556e-05, "loss": 1.055, "step": 5290 }, { "epoch": 42.328, "grad_norm": 45.816368103027344, "learning_rate": 3.2071111111111114e-05, "loss": 0.7894, "step": 5291 }, { "epoch": 42.336, "grad_norm": 30.688047409057617, "learning_rate": 3.206666666666667e-05, "loss": 0.85, "step": 5292 }, { "epoch": 42.344, "grad_norm": 21.75715446472168, "learning_rate": 3.2062222222222224e-05, "loss": 1.1639, "step": 5293 }, { "epoch": 42.352, "grad_norm": 27.557754516601562, "learning_rate": 3.205777777777778e-05, "loss": 0.9233, "step": 5294 }, { "epoch": 42.36, "grad_norm": 22.491561889648438, "learning_rate": 3.2053333333333334e-05, "loss": 0.9664, "step": 5295 }, { "epoch": 42.368, "grad_norm": 11.572044372558594, "learning_rate": 3.204888888888889e-05, "loss": 0.5892, "step": 5296 }, { "epoch": 42.376, "grad_norm": 36.26481246948242, "learning_rate": 3.204444444444444e-05, "loss": 0.9905, "step": 5297 }, { "epoch": 42.384, "grad_norm": 33.982505798339844, "learning_rate": 3.2040000000000005e-05, "loss": 1.4643, "step": 5298 }, { "epoch": 42.392, "grad_norm": 28.10556411743164, "learning_rate": 3.203555555555556e-05, "loss": 1.2036, "step": 5299 }, { "epoch": 
42.4, "grad_norm": 44.344818115234375, "learning_rate": 3.2031111111111115e-05, "loss": 1.1751, "step": 5300 }, { "epoch": 42.408, "grad_norm": 30.70903205871582, "learning_rate": 3.202666666666666e-05, "loss": 1.5417, "step": 5301 }, { "epoch": 42.416, "grad_norm": 22.538143157958984, "learning_rate": 3.2022222222222224e-05, "loss": 0.9025, "step": 5302 }, { "epoch": 42.424, "grad_norm": 36.76937484741211, "learning_rate": 3.201777777777778e-05, "loss": 0.661, "step": 5303 }, { "epoch": 42.432, "grad_norm": 19.358779907226562, "learning_rate": 3.2013333333333334e-05, "loss": 1.0001, "step": 5304 }, { "epoch": 42.44, "grad_norm": 24.876684188842773, "learning_rate": 3.200888888888889e-05, "loss": 1.1378, "step": 5305 }, { "epoch": 42.448, "grad_norm": 21.39922332763672, "learning_rate": 3.200444444444445e-05, "loss": 1.1797, "step": 5306 }, { "epoch": 42.456, "grad_norm": 22.731828689575195, "learning_rate": 3.2000000000000005e-05, "loss": 1.3783, "step": 5307 }, { "epoch": 42.464, "grad_norm": 22.631269454956055, "learning_rate": 3.1995555555555553e-05, "loss": 1.6553, "step": 5308 }, { "epoch": 42.472, "grad_norm": 64.98665618896484, "learning_rate": 3.199111111111111e-05, "loss": 1.2803, "step": 5309 }, { "epoch": 42.48, "grad_norm": 25.76990509033203, "learning_rate": 3.198666666666667e-05, "loss": 0.8098, "step": 5310 }, { "epoch": 42.488, "grad_norm": 24.45260238647461, "learning_rate": 3.1982222222222225e-05, "loss": 1.0758, "step": 5311 }, { "epoch": 42.496, "grad_norm": 130.91856384277344, "learning_rate": 3.197777777777778e-05, "loss": 0.9292, "step": 5312 }, { "epoch": 42.504, "grad_norm": 34.92465591430664, "learning_rate": 3.1973333333333334e-05, "loss": 1.1543, "step": 5313 }, { "epoch": 42.512, "grad_norm": 91.16649627685547, "learning_rate": 3.1968888888888896e-05, "loss": 0.9914, "step": 5314 }, { "epoch": 42.52, "grad_norm": 24.39600372314453, "learning_rate": 3.1964444444444444e-05, "loss": 1.2146, "step": 5315 }, { "epoch": 42.528, "grad_norm": 
28.080839157104492, "learning_rate": 3.196e-05, "loss": 0.8953, "step": 5316 }, { "epoch": 42.536, "grad_norm": 56.679080963134766, "learning_rate": 3.1955555555555554e-05, "loss": 0.9579, "step": 5317 }, { "epoch": 42.544, "grad_norm": 15.124887466430664, "learning_rate": 3.1951111111111115e-05, "loss": 1.1105, "step": 5318 }, { "epoch": 42.552, "grad_norm": 24.619035720825195, "learning_rate": 3.194666666666667e-05, "loss": 0.9418, "step": 5319 }, { "epoch": 42.56, "grad_norm": 38.89430236816406, "learning_rate": 3.1942222222222225e-05, "loss": 0.9175, "step": 5320 }, { "epoch": 42.568, "grad_norm": 38.01605987548828, "learning_rate": 3.193777777777778e-05, "loss": 1.1479, "step": 5321 }, { "epoch": 42.576, "grad_norm": 21.08412742614746, "learning_rate": 3.1933333333333335e-05, "loss": 1.2372, "step": 5322 }, { "epoch": 42.584, "grad_norm": 20.71401596069336, "learning_rate": 3.192888888888889e-05, "loss": 1.1549, "step": 5323 }, { "epoch": 42.592, "grad_norm": 16.224212646484375, "learning_rate": 3.1924444444444444e-05, "loss": 1.2653, "step": 5324 }, { "epoch": 42.6, "grad_norm": 26.196962356567383, "learning_rate": 3.192e-05, "loss": 1.1216, "step": 5325 }, { "epoch": 42.608, "grad_norm": 17.965742111206055, "learning_rate": 3.191555555555556e-05, "loss": 0.9199, "step": 5326 }, { "epoch": 42.616, "grad_norm": 55.4814567565918, "learning_rate": 3.1911111111111116e-05, "loss": 1.6016, "step": 5327 }, { "epoch": 42.624, "grad_norm": 29.62144660949707, "learning_rate": 3.190666666666667e-05, "loss": 0.8218, "step": 5328 }, { "epoch": 42.632, "grad_norm": 26.007822036743164, "learning_rate": 3.1902222222222225e-05, "loss": 1.3107, "step": 5329 }, { "epoch": 42.64, "grad_norm": 23.892789840698242, "learning_rate": 3.189777777777778e-05, "loss": 0.7162, "step": 5330 }, { "epoch": 42.648, "grad_norm": 29.910198211669922, "learning_rate": 3.1893333333333335e-05, "loss": 0.7165, "step": 5331 }, { "epoch": 42.656, "grad_norm": 28.95359230041504, "learning_rate": 
3.188888888888889e-05, "loss": 0.9525, "step": 5332 }, { "epoch": 42.664, "grad_norm": 35.51436996459961, "learning_rate": 3.1884444444444445e-05, "loss": 1.0163, "step": 5333 }, { "epoch": 42.672, "grad_norm": 15.29347038269043, "learning_rate": 3.188e-05, "loss": 0.8445, "step": 5334 }, { "epoch": 42.68, "grad_norm": 22.712390899658203, "learning_rate": 3.187555555555556e-05, "loss": 0.828, "step": 5335 }, { "epoch": 42.688, "grad_norm": 31.83151626586914, "learning_rate": 3.187111111111111e-05, "loss": 1.0565, "step": 5336 }, { "epoch": 42.696, "grad_norm": 31.795207977294922, "learning_rate": 3.1866666666666664e-05, "loss": 1.0744, "step": 5337 }, { "epoch": 42.704, "grad_norm": 21.28531265258789, "learning_rate": 3.186222222222222e-05, "loss": 1.0521, "step": 5338 }, { "epoch": 42.712, "grad_norm": 17.67963981628418, "learning_rate": 3.185777777777778e-05, "loss": 1.271, "step": 5339 }, { "epoch": 42.72, "grad_norm": 90.63045501708984, "learning_rate": 3.1853333333333336e-05, "loss": 0.8364, "step": 5340 }, { "epoch": 42.728, "grad_norm": 24.297279357910156, "learning_rate": 3.184888888888889e-05, "loss": 1.1281, "step": 5341 }, { "epoch": 42.736, "grad_norm": 38.24909973144531, "learning_rate": 3.1844444444444445e-05, "loss": 1.8262, "step": 5342 }, { "epoch": 42.744, "grad_norm": 21.73087501525879, "learning_rate": 3.184e-05, "loss": 1.7711, "step": 5343 }, { "epoch": 42.752, "grad_norm": 41.639034271240234, "learning_rate": 3.1835555555555555e-05, "loss": 1.283, "step": 5344 }, { "epoch": 42.76, "grad_norm": 17.99226951599121, "learning_rate": 3.183111111111111e-05, "loss": 1.3384, "step": 5345 }, { "epoch": 42.768, "grad_norm": 23.03706169128418, "learning_rate": 3.1826666666666665e-05, "loss": 0.9674, "step": 5346 }, { "epoch": 42.776, "grad_norm": 43.41774368286133, "learning_rate": 3.1822222222222226e-05, "loss": 0.8754, "step": 5347 }, { "epoch": 42.784, "grad_norm": 25.07912254333496, "learning_rate": 3.181777777777778e-05, "loss": 1.1408, "step": 
5348 }, { "epoch": 42.792, "grad_norm": 19.984962463378906, "learning_rate": 3.1813333333333336e-05, "loss": 0.9923, "step": 5349 }, { "epoch": 42.8, "grad_norm": 29.63669204711914, "learning_rate": 3.180888888888889e-05, "loss": 1.2182, "step": 5350 }, { "epoch": 42.808, "grad_norm": 18.149761199951172, "learning_rate": 3.1804444444444446e-05, "loss": 1.0638, "step": 5351 }, { "epoch": 42.816, "grad_norm": 22.531063079833984, "learning_rate": 3.18e-05, "loss": 0.8605, "step": 5352 }, { "epoch": 42.824, "grad_norm": 15.140883445739746, "learning_rate": 3.1795555555555555e-05, "loss": 1.1923, "step": 5353 }, { "epoch": 42.832, "grad_norm": 133.52503967285156, "learning_rate": 3.179111111111111e-05, "loss": 1.3886, "step": 5354 }, { "epoch": 42.84, "grad_norm": 29.14088249206543, "learning_rate": 3.178666666666667e-05, "loss": 2.0601, "step": 5355 }, { "epoch": 42.848, "grad_norm": 18.17266273498535, "learning_rate": 3.178222222222223e-05, "loss": 1.2853, "step": 5356 }, { "epoch": 42.856, "grad_norm": 16.38196563720703, "learning_rate": 3.177777777777778e-05, "loss": 0.8506, "step": 5357 }, { "epoch": 42.864, "grad_norm": 26.193626403808594, "learning_rate": 3.177333333333333e-05, "loss": 0.9089, "step": 5358 }, { "epoch": 42.872, "grad_norm": 91.2884521484375, "learning_rate": 3.176888888888889e-05, "loss": 0.8345, "step": 5359 }, { "epoch": 42.88, "grad_norm": 41.985191345214844, "learning_rate": 3.1764444444444446e-05, "loss": 1.3975, "step": 5360 }, { "epoch": 42.888, "grad_norm": 27.54715347290039, "learning_rate": 3.176e-05, "loss": 0.9372, "step": 5361 }, { "epoch": 42.896, "grad_norm": 19.4273681640625, "learning_rate": 3.1755555555555556e-05, "loss": 1.1484, "step": 5362 }, { "epoch": 42.904, "grad_norm": 17.71941566467285, "learning_rate": 3.175111111111112e-05, "loss": 0.8881, "step": 5363 }, { "epoch": 42.912, "grad_norm": 24.904563903808594, "learning_rate": 3.174666666666667e-05, "loss": 1.1929, "step": 5364 }, { "epoch": 42.92, "grad_norm": 
27.415058135986328, "learning_rate": 3.174222222222222e-05, "loss": 0.6563, "step": 5365 }, { "epoch": 42.928, "grad_norm": 21.969608306884766, "learning_rate": 3.1737777777777775e-05, "loss": 1.0292, "step": 5366 }, { "epoch": 42.936, "grad_norm": 27.07681655883789, "learning_rate": 3.173333333333334e-05, "loss": 0.9201, "step": 5367 }, { "epoch": 42.944, "grad_norm": 40.24681854248047, "learning_rate": 3.172888888888889e-05, "loss": 0.9077, "step": 5368 }, { "epoch": 42.952, "grad_norm": 17.7329158782959, "learning_rate": 3.1724444444444446e-05, "loss": 0.6988, "step": 5369 }, { "epoch": 42.96, "grad_norm": 21.442916870117188, "learning_rate": 3.172e-05, "loss": 1.3889, "step": 5370 }, { "epoch": 42.968, "grad_norm": 29.032581329345703, "learning_rate": 3.1715555555555556e-05, "loss": 1.0512, "step": 5371 }, { "epoch": 42.976, "grad_norm": 27.437559127807617, "learning_rate": 3.171111111111111e-05, "loss": 1.0206, "step": 5372 }, { "epoch": 42.984, "grad_norm": 17.639404296875, "learning_rate": 3.1706666666666666e-05, "loss": 0.8593, "step": 5373 }, { "epoch": 42.992, "grad_norm": 29.44976234436035, "learning_rate": 3.170222222222222e-05, "loss": 1.086, "step": 5374 }, { "epoch": 43.0, "grad_norm": 22.535863876342773, "learning_rate": 3.169777777777778e-05, "loss": 1.251, "step": 5375 }, { "epoch": 43.0, "eval_loss": 1.145214319229126, "eval_map": 0.3815, "eval_map_50": 0.7397, "eval_map_75": 0.3451, "eval_map_Coverall": 0.5813, "eval_map_Face_Shield": 0.3979, "eval_map_Gloves": 0.2825, "eval_map_Goggles": 0.2582, "eval_map_Mask": 0.3878, "eval_map_large": 0.594, "eval_map_medium": 0.2645, "eval_map_small": 0.2528, "eval_mar_1": 0.3073, "eval_mar_10": 0.5308, "eval_mar_100": 0.5436, "eval_mar_100_Coverall": 0.7311, "eval_mar_100_Face_Shield": 0.6824, "eval_mar_100_Gloves": 0.3984, "eval_mar_100_Goggles": 0.4563, "eval_mar_100_Mask": 0.45, "eval_mar_large": 0.7127, "eval_mar_medium": 0.4255, "eval_mar_small": 0.3163, "eval_runtime": 0.944, 
"eval_samples_per_second": 30.719, "eval_steps_per_second": 2.119, "step": 5375 }, { "epoch": 43.008, "grad_norm": 18.681428909301758, "learning_rate": 3.169333333333334e-05, "loss": 0.7802, "step": 5376 }, { "epoch": 43.016, "grad_norm": 21.787147521972656, "learning_rate": 3.168888888888889e-05, "loss": 0.9758, "step": 5377 }, { "epoch": 43.024, "grad_norm": 16.628997802734375, "learning_rate": 3.168444444444445e-05, "loss": 1.1115, "step": 5378 }, { "epoch": 43.032, "grad_norm": 46.030399322509766, "learning_rate": 3.168e-05, "loss": 1.001, "step": 5379 }, { "epoch": 43.04, "grad_norm": 31.61185073852539, "learning_rate": 3.1675555555555556e-05, "loss": 1.1113, "step": 5380 }, { "epoch": 43.048, "grad_norm": 18.34981918334961, "learning_rate": 3.167111111111111e-05, "loss": 1.1157, "step": 5381 }, { "epoch": 43.056, "grad_norm": 48.716854095458984, "learning_rate": 3.1666666666666666e-05, "loss": 1.2656, "step": 5382 }, { "epoch": 43.064, "grad_norm": 28.843732833862305, "learning_rate": 3.166222222222223e-05, "loss": 0.8094, "step": 5383 }, { "epoch": 43.072, "grad_norm": 37.8372917175293, "learning_rate": 3.165777777777778e-05, "loss": 1.182, "step": 5384 }, { "epoch": 43.08, "grad_norm": 48.70436096191406, "learning_rate": 3.165333333333334e-05, "loss": 1.1475, "step": 5385 }, { "epoch": 43.088, "grad_norm": 25.073453903198242, "learning_rate": 3.164888888888889e-05, "loss": 1.009, "step": 5386 }, { "epoch": 43.096, "grad_norm": 28.291461944580078, "learning_rate": 3.164444444444444e-05, "loss": 1.066, "step": 5387 }, { "epoch": 43.104, "grad_norm": 19.72757911682129, "learning_rate": 3.164e-05, "loss": 0.8828, "step": 5388 }, { "epoch": 43.112, "grad_norm": 40.41891098022461, "learning_rate": 3.163555555555556e-05, "loss": 1.0946, "step": 5389 }, { "epoch": 43.12, "grad_norm": 52.320369720458984, "learning_rate": 3.163111111111111e-05, "loss": 1.1451, "step": 5390 }, { "epoch": 43.128, "grad_norm": 48.138023376464844, "learning_rate": 3.1626666666666667e-05, 
"loss": 0.7707, "step": 5391 }, { "epoch": 43.136, "grad_norm": 21.978744506835938, "learning_rate": 3.162222222222223e-05, "loss": 2.7583, "step": 5392 }, { "epoch": 43.144, "grad_norm": 16.036828994750977, "learning_rate": 3.1617777777777776e-05, "loss": 0.8753, "step": 5393 }, { "epoch": 43.152, "grad_norm": 20.195253372192383, "learning_rate": 3.161333333333333e-05, "loss": 1.3184, "step": 5394 }, { "epoch": 43.16, "grad_norm": 25.48206901550293, "learning_rate": 3.1608888888888886e-05, "loss": 1.2833, "step": 5395 }, { "epoch": 43.168, "grad_norm": 17.033823013305664, "learning_rate": 3.160444444444445e-05, "loss": 1.2562, "step": 5396 }, { "epoch": 43.176, "grad_norm": 43.96566390991211, "learning_rate": 3.16e-05, "loss": 1.8349, "step": 5397 }, { "epoch": 43.184, "grad_norm": 13.10722827911377, "learning_rate": 3.159555555555556e-05, "loss": 2.1137, "step": 5398 }, { "epoch": 43.192, "grad_norm": 39.183006286621094, "learning_rate": 3.159111111111111e-05, "loss": 0.8937, "step": 5399 }, { "epoch": 43.2, "grad_norm": 18.97592544555664, "learning_rate": 3.158666666666667e-05, "loss": 1.1686, "step": 5400 }, { "epoch": 43.208, "grad_norm": 14.536492347717285, "learning_rate": 3.158222222222222e-05, "loss": 0.9518, "step": 5401 }, { "epoch": 43.216, "grad_norm": 23.123313903808594, "learning_rate": 3.1577777777777777e-05, "loss": 1.1551, "step": 5402 }, { "epoch": 43.224, "grad_norm": 26.76687240600586, "learning_rate": 3.157333333333333e-05, "loss": 1.1882, "step": 5403 }, { "epoch": 43.232, "grad_norm": 32.45722198486328, "learning_rate": 3.156888888888889e-05, "loss": 1.3764, "step": 5404 }, { "epoch": 43.24, "grad_norm": 53.08210372924805, "learning_rate": 3.156444444444445e-05, "loss": 1.2056, "step": 5405 }, { "epoch": 43.248, "grad_norm": 33.415775299072266, "learning_rate": 3.156e-05, "loss": 1.1573, "step": 5406 }, { "epoch": 43.256, "grad_norm": 26.9246826171875, "learning_rate": 3.155555555555556e-05, "loss": 1.1193, "step": 5407 }, { "epoch": 43.264, 
"grad_norm": 30.516315460205078, "learning_rate": 3.155111111111111e-05, "loss": 0.8692, "step": 5408 }, { "epoch": 43.272, "grad_norm": 54.71518325805664, "learning_rate": 3.154666666666667e-05, "loss": 0.9734, "step": 5409 }, { "epoch": 43.28, "grad_norm": 20.463586807250977, "learning_rate": 3.154222222222222e-05, "loss": 0.9943, "step": 5410 }, { "epoch": 43.288, "grad_norm": 15.290079116821289, "learning_rate": 3.153777777777778e-05, "loss": 2.2705, "step": 5411 }, { "epoch": 43.296, "grad_norm": 27.748916625976562, "learning_rate": 3.153333333333334e-05, "loss": 0.8896, "step": 5412 }, { "epoch": 43.304, "grad_norm": 24.45749855041504, "learning_rate": 3.1528888888888893e-05, "loss": 0.8092, "step": 5413 }, { "epoch": 43.312, "grad_norm": 132.6554412841797, "learning_rate": 3.152444444444445e-05, "loss": 1.0263, "step": 5414 }, { "epoch": 43.32, "grad_norm": 28.7912540435791, "learning_rate": 3.1519999999999996e-05, "loss": 1.4731, "step": 5415 }, { "epoch": 43.328, "grad_norm": 28.643329620361328, "learning_rate": 3.151555555555556e-05, "loss": 1.0647, "step": 5416 }, { "epoch": 43.336, "grad_norm": 39.72140884399414, "learning_rate": 3.151111111111111e-05, "loss": 0.9035, "step": 5417 }, { "epoch": 43.344, "grad_norm": 41.449092864990234, "learning_rate": 3.150666666666667e-05, "loss": 0.8604, "step": 5418 }, { "epoch": 43.352, "grad_norm": 42.7596549987793, "learning_rate": 3.150222222222222e-05, "loss": 0.9544, "step": 5419 }, { "epoch": 43.36, "grad_norm": 26.909433364868164, "learning_rate": 3.1497777777777784e-05, "loss": 0.9144, "step": 5420 }, { "epoch": 43.368, "grad_norm": 13.030452728271484, "learning_rate": 3.149333333333334e-05, "loss": 1.2212, "step": 5421 }, { "epoch": 43.376, "grad_norm": 16.325531005859375, "learning_rate": 3.148888888888889e-05, "loss": 0.8403, "step": 5422 }, { "epoch": 43.384, "grad_norm": 38.361793518066406, "learning_rate": 3.148444444444444e-05, "loss": 1.0318, "step": 5423 }, { "epoch": 43.392, "grad_norm": 
23.834136962890625, "learning_rate": 3.1480000000000004e-05, "loss": 1.0182, "step": 5424 }, { "epoch": 43.4, "grad_norm": 35.47586441040039, "learning_rate": 3.147555555555556e-05, "loss": 0.8095, "step": 5425 }, { "epoch": 43.408, "grad_norm": 82.33019256591797, "learning_rate": 3.147111111111111e-05, "loss": 1.5561, "step": 5426 }, { "epoch": 43.416, "grad_norm": 16.975383758544922, "learning_rate": 3.146666666666667e-05, "loss": 1.1395, "step": 5427 }, { "epoch": 43.424, "grad_norm": 43.63080596923828, "learning_rate": 3.146222222222222e-05, "loss": 1.1891, "step": 5428 }, { "epoch": 43.432, "grad_norm": 18.88796615600586, "learning_rate": 3.145777777777778e-05, "loss": 1.1025, "step": 5429 }, { "epoch": 43.44, "grad_norm": 18.47178077697754, "learning_rate": 3.145333333333333e-05, "loss": 0.9265, "step": 5430 }, { "epoch": 43.448, "grad_norm": 53.49228286743164, "learning_rate": 3.144888888888889e-05, "loss": 1.7977, "step": 5431 }, { "epoch": 43.456, "grad_norm": 36.22589111328125, "learning_rate": 3.144444444444445e-05, "loss": 0.8898, "step": 5432 }, { "epoch": 43.464, "grad_norm": 24.59613800048828, "learning_rate": 3.1440000000000004e-05, "loss": 1.1104, "step": 5433 }, { "epoch": 43.472, "grad_norm": 17.16135025024414, "learning_rate": 3.143555555555556e-05, "loss": 1.429, "step": 5434 }, { "epoch": 43.48, "grad_norm": 34.17888259887695, "learning_rate": 3.1431111111111114e-05, "loss": 1.1452, "step": 5435 }, { "epoch": 43.488, "grad_norm": 73.25718688964844, "learning_rate": 3.142666666666667e-05, "loss": 1.1889, "step": 5436 }, { "epoch": 43.496, "grad_norm": 21.313716888427734, "learning_rate": 3.142222222222222e-05, "loss": 1.5346, "step": 5437 }, { "epoch": 43.504, "grad_norm": 132.94007873535156, "learning_rate": 3.141777777777778e-05, "loss": 1.2381, "step": 5438 }, { "epoch": 43.512, "grad_norm": 22.678598403930664, "learning_rate": 3.141333333333333e-05, "loss": 1.2628, "step": 5439 }, { "epoch": 43.52, "grad_norm": 21.106014251708984, 
"learning_rate": 3.140888888888889e-05, "loss": 0.9864, "step": 5440 }, { "epoch": 43.528, "grad_norm": 55.88294982910156, "learning_rate": 3.140444444444445e-05, "loss": 0.8343, "step": 5441 }, { "epoch": 43.536, "grad_norm": 20.02904510498047, "learning_rate": 3.1400000000000004e-05, "loss": 0.7965, "step": 5442 }, { "epoch": 43.544, "grad_norm": 23.526947021484375, "learning_rate": 3.139555555555556e-05, "loss": 1.0002, "step": 5443 }, { "epoch": 43.552, "grad_norm": 33.431640625, "learning_rate": 3.139111111111111e-05, "loss": 1.1828, "step": 5444 }, { "epoch": 43.56, "grad_norm": 35.14539337158203, "learning_rate": 3.138666666666667e-05, "loss": 1.3046, "step": 5445 }, { "epoch": 43.568, "grad_norm": 30.258577346801758, "learning_rate": 3.1382222222222224e-05, "loss": 0.6982, "step": 5446 }, { "epoch": 43.576, "grad_norm": 20.570110321044922, "learning_rate": 3.137777777777778e-05, "loss": 1.1268, "step": 5447 }, { "epoch": 43.584, "grad_norm": 22.41664695739746, "learning_rate": 3.137333333333333e-05, "loss": 1.018, "step": 5448 }, { "epoch": 43.592, "grad_norm": 24.24004364013672, "learning_rate": 3.1368888888888895e-05, "loss": 0.9775, "step": 5449 }, { "epoch": 43.6, "grad_norm": 32.229820251464844, "learning_rate": 3.136444444444444e-05, "loss": 1.8269, "step": 5450 }, { "epoch": 43.608, "grad_norm": 21.49453353881836, "learning_rate": 3.136e-05, "loss": 0.8777, "step": 5451 }, { "epoch": 43.616, "grad_norm": 32.2406120300293, "learning_rate": 3.135555555555555e-05, "loss": 0.7539, "step": 5452 }, { "epoch": 43.624, "grad_norm": 32.32523727416992, "learning_rate": 3.1351111111111114e-05, "loss": 1.0406, "step": 5453 }, { "epoch": 43.632, "grad_norm": 22.127933502197266, "learning_rate": 3.134666666666667e-05, "loss": 1.2342, "step": 5454 }, { "epoch": 43.64, "grad_norm": 44.75752639770508, "learning_rate": 3.1342222222222224e-05, "loss": 1.0757, "step": 5455 }, { "epoch": 43.648, "grad_norm": 75.4614028930664, "learning_rate": 3.133777777777778e-05, 
"loss": 1.0065, "step": 5456 }, { "epoch": 43.656, "grad_norm": 32.008819580078125, "learning_rate": 3.1333333333333334e-05, "loss": 1.5013, "step": 5457 }, { "epoch": 43.664, "grad_norm": 21.45829200744629, "learning_rate": 3.132888888888889e-05, "loss": 1.0399, "step": 5458 }, { "epoch": 43.672, "grad_norm": 30.432424545288086, "learning_rate": 3.1324444444444443e-05, "loss": 1.116, "step": 5459 }, { "epoch": 43.68, "grad_norm": 25.90497589111328, "learning_rate": 3.132e-05, "loss": 0.9468, "step": 5460 }, { "epoch": 43.688, "grad_norm": 30.277164459228516, "learning_rate": 3.131555555555556e-05, "loss": 1.0691, "step": 5461 }, { "epoch": 43.696, "grad_norm": 14.990297317504883, "learning_rate": 3.1311111111111115e-05, "loss": 0.9236, "step": 5462 }, { "epoch": 43.704, "grad_norm": 23.38539695739746, "learning_rate": 3.130666666666667e-05, "loss": 1.164, "step": 5463 }, { "epoch": 43.712, "grad_norm": 19.244892120361328, "learning_rate": 3.1302222222222224e-05, "loss": 0.9701, "step": 5464 }, { "epoch": 43.72, "grad_norm": 28.497907638549805, "learning_rate": 3.129777777777778e-05, "loss": 1.1102, "step": 5465 }, { "epoch": 43.728, "grad_norm": 29.862628936767578, "learning_rate": 3.1293333333333334e-05, "loss": 1.2328, "step": 5466 }, { "epoch": 43.736, "grad_norm": 47.81716537475586, "learning_rate": 3.128888888888889e-05, "loss": 1.1324, "step": 5467 }, { "epoch": 43.744, "grad_norm": 25.33182716369629, "learning_rate": 3.1284444444444444e-05, "loss": 1.3226, "step": 5468 }, { "epoch": 43.752, "grad_norm": 17.14032554626465, "learning_rate": 3.1280000000000005e-05, "loss": 1.1711, "step": 5469 }, { "epoch": 43.76, "grad_norm": 16.02715301513672, "learning_rate": 3.127555555555556e-05, "loss": 1.2341, "step": 5470 }, { "epoch": 43.768, "grad_norm": 47.2015380859375, "learning_rate": 3.1271111111111115e-05, "loss": 1.1049, "step": 5471 }, { "epoch": 43.776, "grad_norm": 18.026622772216797, "learning_rate": 3.126666666666666e-05, "loss": 0.8084, "step": 5472 }, { 
"epoch": 43.784, "grad_norm": 19.461034774780273, "learning_rate": 3.1262222222222225e-05, "loss": 1.0016, "step": 5473 }, { "epoch": 43.792, "grad_norm": 25.968656539916992, "learning_rate": 3.125777777777778e-05, "loss": 1.1072, "step": 5474 }, { "epoch": 43.8, "grad_norm": 43.52934265136719, "learning_rate": 3.1253333333333335e-05, "loss": 1.0734, "step": 5475 }, { "epoch": 43.808, "grad_norm": 22.75004768371582, "learning_rate": 3.124888888888889e-05, "loss": 0.8635, "step": 5476 }, { "epoch": 43.816, "grad_norm": 17.977041244506836, "learning_rate": 3.124444444444445e-05, "loss": 1.3377, "step": 5477 }, { "epoch": 43.824, "grad_norm": 48.99538040161133, "learning_rate": 3.1240000000000006e-05, "loss": 1.0438, "step": 5478 }, { "epoch": 43.832, "grad_norm": 27.76185417175293, "learning_rate": 3.1235555555555554e-05, "loss": 1.1056, "step": 5479 }, { "epoch": 43.84, "grad_norm": 35.164222717285156, "learning_rate": 3.123111111111111e-05, "loss": 0.8757, "step": 5480 }, { "epoch": 43.848, "grad_norm": 19.454477310180664, "learning_rate": 3.122666666666667e-05, "loss": 0.7953, "step": 5481 }, { "epoch": 43.856, "grad_norm": 23.41345977783203, "learning_rate": 3.1222222222222225e-05, "loss": 1.1882, "step": 5482 }, { "epoch": 43.864, "grad_norm": 18.36427116394043, "learning_rate": 3.121777777777778e-05, "loss": 1.2102, "step": 5483 }, { "epoch": 43.872, "grad_norm": 21.932209014892578, "learning_rate": 3.1213333333333335e-05, "loss": 1.055, "step": 5484 }, { "epoch": 43.88, "grad_norm": 322.5445556640625, "learning_rate": 3.120888888888889e-05, "loss": 0.77, "step": 5485 }, { "epoch": 43.888, "grad_norm": 19.08990478515625, "learning_rate": 3.1204444444444445e-05, "loss": 1.0172, "step": 5486 }, { "epoch": 43.896, "grad_norm": 46.989990234375, "learning_rate": 3.12e-05, "loss": 1.0919, "step": 5487 }, { "epoch": 43.904, "grad_norm": 18.18601417541504, "learning_rate": 3.1195555555555554e-05, "loss": 1.1762, "step": 5488 }, { "epoch": 43.912, "grad_norm": 
18.509939193725586, "learning_rate": 3.119111111111111e-05, "loss": 1.0387, "step": 5489 }, { "epoch": 43.92, "grad_norm": 23.77165985107422, "learning_rate": 3.118666666666667e-05, "loss": 1.3474, "step": 5490 }, { "epoch": 43.928, "grad_norm": 30.11639404296875, "learning_rate": 3.1182222222222226e-05, "loss": 1.0477, "step": 5491 }, { "epoch": 43.936, "grad_norm": 23.861103057861328, "learning_rate": 3.117777777777778e-05, "loss": 1.0441, "step": 5492 }, { "epoch": 43.944, "grad_norm": 14.345514297485352, "learning_rate": 3.1173333333333335e-05, "loss": 1.3665, "step": 5493 }, { "epoch": 43.952, "grad_norm": 33.93020248413086, "learning_rate": 3.116888888888889e-05, "loss": 1.4316, "step": 5494 }, { "epoch": 43.96, "grad_norm": 21.279390335083008, "learning_rate": 3.1164444444444445e-05, "loss": 1.1213, "step": 5495 }, { "epoch": 43.968, "grad_norm": 36.03968048095703, "learning_rate": 3.116e-05, "loss": 0.8444, "step": 5496 }, { "epoch": 43.976, "grad_norm": 18.07552146911621, "learning_rate": 3.1155555555555555e-05, "loss": 1.1018, "step": 5497 }, { "epoch": 43.984, "grad_norm": 14.725049018859863, "learning_rate": 3.1151111111111116e-05, "loss": 1.1246, "step": 5498 }, { "epoch": 43.992, "grad_norm": 19.39595603942871, "learning_rate": 3.114666666666667e-05, "loss": 1.2081, "step": 5499 }, { "epoch": 44.0, "grad_norm": 26.780746459960938, "learning_rate": 3.1142222222222226e-05, "loss": 1.2788, "step": 5500 }, { "epoch": 44.0, "eval_loss": 1.181670904159546, "eval_map": 0.3342, "eval_map_50": 0.6765, "eval_map_75": 0.291, "eval_map_Coverall": 0.5436, "eval_map_Face_Shield": 0.4131, "eval_map_Gloves": 0.248, "eval_map_Goggles": 0.1499, "eval_map_Mask": 0.3166, "eval_map_large": 0.5594, "eval_map_medium": 0.2106, "eval_map_small": 0.3345, "eval_mar_1": 0.2787, "eval_mar_10": 0.5078, "eval_mar_100": 0.5207, "eval_mar_100_Coverall": 0.6889, "eval_mar_100_Face_Shield": 0.6647, "eval_mar_100_Gloves": 0.382, "eval_mar_100_Goggles": 0.4313, "eval_mar_100_Mask": 
0.4365, "eval_mar_large": 0.6859, "eval_mar_medium": 0.3898, "eval_mar_small": 0.3843, "eval_runtime": 0.9326, "eval_samples_per_second": 31.097, "eval_steps_per_second": 2.145, "step": 5500 }, { "epoch": 44.008, "grad_norm": 31.689496994018555, "learning_rate": 3.1137777777777774e-05, "loss": 0.999, "step": 5501 }, { "epoch": 44.016, "grad_norm": 12.365788459777832, "learning_rate": 3.1133333333333336e-05, "loss": 0.9486, "step": 5502 }, { "epoch": 44.024, "grad_norm": 15.803059577941895, "learning_rate": 3.112888888888889e-05, "loss": 1.1411, "step": 5503 }, { "epoch": 44.032, "grad_norm": 53.89457702636719, "learning_rate": 3.1124444444444445e-05, "loss": 1.4587, "step": 5504 }, { "epoch": 44.04, "grad_norm": 25.784448623657227, "learning_rate": 3.112e-05, "loss": 1.7336, "step": 5505 }, { "epoch": 44.048, "grad_norm": 37.72215270996094, "learning_rate": 3.111555555555556e-05, "loss": 1.6528, "step": 5506 }, { "epoch": 44.056, "grad_norm": 77.50253295898438, "learning_rate": 3.111111111111111e-05, "loss": 1.1561, "step": 5507 }, { "epoch": 44.064, "grad_norm": 14.9267578125, "learning_rate": 3.1106666666666665e-05, "loss": 0.9202, "step": 5508 }, { "epoch": 44.072, "grad_norm": 18.959627151489258, "learning_rate": 3.110222222222222e-05, "loss": 1.0617, "step": 5509 }, { "epoch": 44.08, "grad_norm": 23.617219924926758, "learning_rate": 3.109777777777778e-05, "loss": 1.8173, "step": 5510 }, { "epoch": 44.088, "grad_norm": 21.277021408081055, "learning_rate": 3.1093333333333336e-05, "loss": 0.7362, "step": 5511 }, { "epoch": 44.096, "grad_norm": 42.08322525024414, "learning_rate": 3.108888888888889e-05, "loss": 0.9402, "step": 5512 }, { "epoch": 44.104, "grad_norm": 17.154970169067383, "learning_rate": 3.1084444444444446e-05, "loss": 0.6959, "step": 5513 }, { "epoch": 44.112, "grad_norm": 48.17635726928711, "learning_rate": 3.108e-05, "loss": 0.8133, "step": 5514 }, { "epoch": 44.12, "grad_norm": 22.207256317138672, "learning_rate": 3.1075555555555555e-05, "loss": 
1.2316, "step": 5515 }, { "epoch": 44.128, "grad_norm": 85.12278747558594, "learning_rate": 3.107111111111111e-05, "loss": 1.3327, "step": 5516 }, { "epoch": 44.136, "grad_norm": 23.47433090209961, "learning_rate": 3.1066666666666665e-05, "loss": 1.0693, "step": 5517 }, { "epoch": 44.144, "grad_norm": 19.465726852416992, "learning_rate": 3.106222222222223e-05, "loss": 1.42, "step": 5518 }, { "epoch": 44.152, "grad_norm": 78.18587493896484, "learning_rate": 3.105777777777778e-05, "loss": 1.2213, "step": 5519 }, { "epoch": 44.16, "grad_norm": 33.5923957824707, "learning_rate": 3.1053333333333336e-05, "loss": 1.0235, "step": 5520 }, { "epoch": 44.168, "grad_norm": 19.677335739135742, "learning_rate": 3.104888888888889e-05, "loss": 1.1214, "step": 5521 }, { "epoch": 44.176, "grad_norm": 38.14356231689453, "learning_rate": 3.1044444444444446e-05, "loss": 0.9697, "step": 5522 }, { "epoch": 44.184, "grad_norm": 21.2209415435791, "learning_rate": 3.104e-05, "loss": 1.3219, "step": 5523 }, { "epoch": 44.192, "grad_norm": 32.5030403137207, "learning_rate": 3.1035555555555556e-05, "loss": 1.2548, "step": 5524 }, { "epoch": 44.2, "grad_norm": 28.087881088256836, "learning_rate": 3.103111111111111e-05, "loss": 0.9317, "step": 5525 }, { "epoch": 44.208, "grad_norm": 18.676715850830078, "learning_rate": 3.102666666666667e-05, "loss": 1.1229, "step": 5526 }, { "epoch": 44.216, "grad_norm": 20.710145950317383, "learning_rate": 3.102222222222223e-05, "loss": 1.445, "step": 5527 }, { "epoch": 44.224, "grad_norm": 19.771974563598633, "learning_rate": 3.101777777777778e-05, "loss": 1.286, "step": 5528 }, { "epoch": 44.232, "grad_norm": 16.880022048950195, "learning_rate": 3.101333333333333e-05, "loss": 1.0113, "step": 5529 }, { "epoch": 44.24, "grad_norm": 23.42350196838379, "learning_rate": 3.100888888888889e-05, "loss": 1.1986, "step": 5530 }, { "epoch": 44.248, "grad_norm": 26.794307708740234, "learning_rate": 3.1004444444444447e-05, "loss": 1.2769, "step": 5531 }, { "epoch": 
44.256, "grad_norm": 16.84111785888672, "learning_rate": 3.1e-05, "loss": 1.1339, "step": 5532 }, { "epoch": 44.264, "grad_norm": 39.624488830566406, "learning_rate": 3.0995555555555556e-05, "loss": 2.6184, "step": 5533 }, { "epoch": 44.272, "grad_norm": 27.504608154296875, "learning_rate": 3.099111111111112e-05, "loss": 0.7555, "step": 5534 }, { "epoch": 44.28, "grad_norm": 19.93617057800293, "learning_rate": 3.098666666666667e-05, "loss": 0.8698, "step": 5535 }, { "epoch": 44.288, "grad_norm": 31.150327682495117, "learning_rate": 3.098222222222222e-05, "loss": 1.2485, "step": 5536 }, { "epoch": 44.296, "grad_norm": 21.34897232055664, "learning_rate": 3.0977777777777776e-05, "loss": 1.0184, "step": 5537 }, { "epoch": 44.304, "grad_norm": 24.208019256591797, "learning_rate": 3.097333333333334e-05, "loss": 1.4461, "step": 5538 }, { "epoch": 44.312, "grad_norm": 18.287822723388672, "learning_rate": 3.096888888888889e-05, "loss": 0.8814, "step": 5539 }, { "epoch": 44.32, "grad_norm": 28.219873428344727, "learning_rate": 3.096444444444445e-05, "loss": 2.0724, "step": 5540 }, { "epoch": 44.328, "grad_norm": 72.94813537597656, "learning_rate": 3.096e-05, "loss": 0.949, "step": 5541 }, { "epoch": 44.336, "grad_norm": 48.69944381713867, "learning_rate": 3.0955555555555557e-05, "loss": 1.2511, "step": 5542 }, { "epoch": 44.344, "grad_norm": 29.931865692138672, "learning_rate": 3.095111111111111e-05, "loss": 0.669, "step": 5543 }, { "epoch": 44.352, "grad_norm": 19.82213020324707, "learning_rate": 3.0946666666666666e-05, "loss": 1.0979, "step": 5544 }, { "epoch": 44.36, "grad_norm": 55.98955154418945, "learning_rate": 3.094222222222222e-05, "loss": 1.4224, "step": 5545 }, { "epoch": 44.368, "grad_norm": 18.967483520507812, "learning_rate": 3.0937777777777776e-05, "loss": 0.8098, "step": 5546 }, { "epoch": 44.376, "grad_norm": 61.75593566894531, "learning_rate": 3.093333333333334e-05, "loss": 3.203, "step": 5547 }, { "epoch": 44.384, "grad_norm": 63.753360748291016, 
"learning_rate": 3.092888888888889e-05, "loss": 1.057, "step": 5548 }, { "epoch": 44.392, "grad_norm": 20.672563552856445, "learning_rate": 3.092444444444445e-05, "loss": 1.0497, "step": 5549 }, { "epoch": 44.4, "grad_norm": 37.943992614746094, "learning_rate": 3.092e-05, "loss": 0.8171, "step": 5550 }, { "epoch": 44.408, "grad_norm": 33.55646514892578, "learning_rate": 3.091555555555556e-05, "loss": 0.808, "step": 5551 }, { "epoch": 44.416, "grad_norm": 33.084468841552734, "learning_rate": 3.091111111111111e-05, "loss": 1.4199, "step": 5552 }, { "epoch": 44.424, "grad_norm": 23.44062614440918, "learning_rate": 3.090666666666667e-05, "loss": 0.7275, "step": 5553 }, { "epoch": 44.432, "grad_norm": 40.18806457519531, "learning_rate": 3.090222222222222e-05, "loss": 1.05, "step": 5554 }, { "epoch": 44.44, "grad_norm": 15.769125938415527, "learning_rate": 3.089777777777778e-05, "loss": 0.9323, "step": 5555 }, { "epoch": 44.448, "grad_norm": 30.107999801635742, "learning_rate": 3.089333333333334e-05, "loss": 0.8463, "step": 5556 }, { "epoch": 44.456, "grad_norm": 12.136287689208984, "learning_rate": 3.088888888888889e-05, "loss": 0.6562, "step": 5557 }, { "epoch": 44.464, "grad_norm": 28.220197677612305, "learning_rate": 3.088444444444444e-05, "loss": 0.895, "step": 5558 }, { "epoch": 44.472, "grad_norm": 17.08080291748047, "learning_rate": 3.088e-05, "loss": 0.7966, "step": 5559 }, { "epoch": 44.48, "grad_norm": 35.64628601074219, "learning_rate": 3.087555555555556e-05, "loss": 1.108, "step": 5560 }, { "epoch": 44.488, "grad_norm": 16.614421844482422, "learning_rate": 3.087111111111111e-05, "loss": 0.8266, "step": 5561 }, { "epoch": 44.496, "grad_norm": 27.286121368408203, "learning_rate": 3.086666666666667e-05, "loss": 0.6732, "step": 5562 }, { "epoch": 44.504, "grad_norm": 26.599647521972656, "learning_rate": 3.086222222222223e-05, "loss": 1.4509, "step": 5563 }, { "epoch": 44.512, "grad_norm": 14.897226333618164, "learning_rate": 3.085777777777778e-05, "loss": 
1.0968, "step": 5564 }, { "epoch": 44.52, "grad_norm": 15.77621078491211, "learning_rate": 3.085333333333333e-05, "loss": 0.7671, "step": 5565 }, { "epoch": 44.528, "grad_norm": 20.554590225219727, "learning_rate": 3.0848888888888886e-05, "loss": 1.0235, "step": 5566 }, { "epoch": 44.536, "grad_norm": 75.60435485839844, "learning_rate": 3.084444444444445e-05, "loss": 1.3614, "step": 5567 }, { "epoch": 44.544, "grad_norm": 22.022119522094727, "learning_rate": 3.084e-05, "loss": 0.9662, "step": 5568 }, { "epoch": 44.552, "grad_norm": 28.635047912597656, "learning_rate": 3.083555555555556e-05, "loss": 1.168, "step": 5569 }, { "epoch": 44.56, "grad_norm": 24.0740909576416, "learning_rate": 3.083111111111111e-05, "loss": 1.3649, "step": 5570 }, { "epoch": 44.568, "grad_norm": 27.517913818359375, "learning_rate": 3.082666666666667e-05, "loss": 1.042, "step": 5571 }, { "epoch": 44.576, "grad_norm": 36.09880828857422, "learning_rate": 3.082222222222222e-05, "loss": 1.0858, "step": 5572 }, { "epoch": 44.584, "grad_norm": 25.10969352722168, "learning_rate": 3.081777777777778e-05, "loss": 1.3658, "step": 5573 }, { "epoch": 44.592, "grad_norm": 19.593826293945312, "learning_rate": 3.081333333333333e-05, "loss": 0.811, "step": 5574 }, { "epoch": 44.6, "grad_norm": 13.325226783752441, "learning_rate": 3.0808888888888894e-05, "loss": 1.133, "step": 5575 }, { "epoch": 44.608, "grad_norm": 21.33448600769043, "learning_rate": 3.080444444444445e-05, "loss": 0.9645, "step": 5576 }, { "epoch": 44.616, "grad_norm": 23.071847915649414, "learning_rate": 3.08e-05, "loss": 0.8506, "step": 5577 }, { "epoch": 44.624, "grad_norm": 28.278751373291016, "learning_rate": 3.079555555555556e-05, "loss": 0.7914, "step": 5578 }, { "epoch": 44.632, "grad_norm": 54.74924850463867, "learning_rate": 3.079111111111111e-05, "loss": 1.0995, "step": 5579 }, { "epoch": 44.64, "grad_norm": 26.609315872192383, "learning_rate": 3.078666666666667e-05, "loss": 1.0082, "step": 5580 }, { "epoch": 44.648, "grad_norm": 
28.154069900512695, "learning_rate": 3.078222222222222e-05, "loss": 0.8647, "step": 5581 }, { "epoch": 44.656, "grad_norm": 16.146778106689453, "learning_rate": 3.077777777777778e-05, "loss": 0.856, "step": 5582 }, { "epoch": 44.664, "grad_norm": 20.057485580444336, "learning_rate": 3.077333333333334e-05, "loss": 1.5204, "step": 5583 }, { "epoch": 44.672, "grad_norm": 21.644866943359375, "learning_rate": 3.0768888888888894e-05, "loss": 0.7104, "step": 5584 }, { "epoch": 44.68, "grad_norm": 25.03325080871582, "learning_rate": 3.076444444444445e-05, "loss": 1.1691, "step": 5585 }, { "epoch": 44.688, "grad_norm": 17.5009765625, "learning_rate": 3.076e-05, "loss": 0.8314, "step": 5586 }, { "epoch": 44.696, "grad_norm": 23.196229934692383, "learning_rate": 3.075555555555556e-05, "loss": 0.7237, "step": 5587 }, { "epoch": 44.704, "grad_norm": 30.102893829345703, "learning_rate": 3.075111111111111e-05, "loss": 0.8872, "step": 5588 }, { "epoch": 44.712, "grad_norm": 11.370315551757812, "learning_rate": 3.074666666666667e-05, "loss": 1.1014, "step": 5589 }, { "epoch": 44.72, "grad_norm": 18.92432403564453, "learning_rate": 3.074222222222222e-05, "loss": 1.9957, "step": 5590 }, { "epoch": 44.728, "grad_norm": 16.134204864501953, "learning_rate": 3.0737777777777785e-05, "loss": 0.9077, "step": 5591 }, { "epoch": 44.736, "grad_norm": 194.76083374023438, "learning_rate": 3.073333333333334e-05, "loss": 0.952, "step": 5592 }, { "epoch": 44.744, "grad_norm": 22.780990600585938, "learning_rate": 3.072888888888889e-05, "loss": 1.6343, "step": 5593 }, { "epoch": 44.752, "grad_norm": 25.33888053894043, "learning_rate": 3.072444444444444e-05, "loss": 1.1637, "step": 5594 }, { "epoch": 44.76, "grad_norm": 22.055532455444336, "learning_rate": 3.072e-05, "loss": 1.2617, "step": 5595 }, { "epoch": 44.768, "grad_norm": 75.38133239746094, "learning_rate": 3.071555555555556e-05, "loss": 1.1857, "step": 5596 }, { "epoch": 44.776, "grad_norm": 19.994083404541016, "learning_rate": 
3.0711111111111114e-05, "loss": 1.1663, "step": 5597 }, { "epoch": 44.784, "grad_norm": 25.144508361816406, "learning_rate": 3.070666666666667e-05, "loss": 1.3397, "step": 5598 }, { "epoch": 44.792, "grad_norm": 15.500497817993164, "learning_rate": 3.0702222222222223e-05, "loss": 1.4415, "step": 5599 }, { "epoch": 44.8, "grad_norm": 22.59313201904297, "learning_rate": 3.069777777777778e-05, "loss": 1.3787, "step": 5600 }, { "epoch": 44.808, "grad_norm": 28.098129272460938, "learning_rate": 3.069333333333333e-05, "loss": 1.6414, "step": 5601 }, { "epoch": 44.816, "grad_norm": 39.249393463134766, "learning_rate": 3.068888888888889e-05, "loss": 0.8596, "step": 5602 }, { "epoch": 44.824, "grad_norm": 15.706375122070312, "learning_rate": 3.068444444444444e-05, "loss": 1.1951, "step": 5603 }, { "epoch": 44.832, "grad_norm": 19.313125610351562, "learning_rate": 3.0680000000000004e-05, "loss": 1.1372, "step": 5604 }, { "epoch": 44.84, "grad_norm": 27.081241607666016, "learning_rate": 3.067555555555556e-05, "loss": 1.3079, "step": 5605 }, { "epoch": 44.848, "grad_norm": 39.31977462768555, "learning_rate": 3.0671111111111114e-05, "loss": 1.1353, "step": 5606 }, { "epoch": 44.856, "grad_norm": 16.366262435913086, "learning_rate": 3.066666666666667e-05, "loss": 1.1242, "step": 5607 }, { "epoch": 44.864, "grad_norm": 39.61151885986328, "learning_rate": 3.0662222222222224e-05, "loss": 1.0445, "step": 5608 }, { "epoch": 44.872, "grad_norm": 25.369098663330078, "learning_rate": 3.065777777777778e-05, "loss": 1.179, "step": 5609 }, { "epoch": 44.88, "grad_norm": 43.64060974121094, "learning_rate": 3.0653333333333333e-05, "loss": 0.974, "step": 5610 }, { "epoch": 44.888, "grad_norm": 12.163954734802246, "learning_rate": 3.064888888888889e-05, "loss": 0.9433, "step": 5611 }, { "epoch": 44.896, "grad_norm": 26.261558532714844, "learning_rate": 3.064444444444445e-05, "loss": 1.0712, "step": 5612 }, { "epoch": 44.904, "grad_norm": 14.373602867126465, "learning_rate": 
3.0640000000000005e-05, "loss": 0.9791, "step": 5613 }, { "epoch": 44.912, "grad_norm": 52.64714813232422, "learning_rate": 3.063555555555555e-05, "loss": 0.8379, "step": 5614 }, { "epoch": 44.92, "grad_norm": 30.07917022705078, "learning_rate": 3.063111111111111e-05, "loss": 1.3422, "step": 5615 }, { "epoch": 44.928, "grad_norm": 25.093236923217773, "learning_rate": 3.062666666666667e-05, "loss": 0.9592, "step": 5616 }, { "epoch": 44.936, "grad_norm": 30.236724853515625, "learning_rate": 3.0622222222222224e-05, "loss": 0.7189, "step": 5617 }, { "epoch": 44.944, "grad_norm": 23.61443328857422, "learning_rate": 3.061777777777778e-05, "loss": 1.0507, "step": 5618 }, { "epoch": 44.952, "grad_norm": 23.727310180664062, "learning_rate": 3.0613333333333334e-05, "loss": 0.8962, "step": 5619 }, { "epoch": 44.96, "grad_norm": 28.566068649291992, "learning_rate": 3.0608888888888895e-05, "loss": 0.9238, "step": 5620 }, { "epoch": 44.968, "grad_norm": 34.23716354370117, "learning_rate": 3.0604444444444444e-05, "loss": 0.7787, "step": 5621 }, { "epoch": 44.976, "grad_norm": 21.41071319580078, "learning_rate": 3.06e-05, "loss": 0.8751, "step": 5622 }, { "epoch": 44.984, "grad_norm": 17.314477920532227, "learning_rate": 3.059555555555555e-05, "loss": 0.8478, "step": 5623 }, { "epoch": 44.992, "grad_norm": 239.86370849609375, "learning_rate": 3.0591111111111115e-05, "loss": 1.3875, "step": 5624 }, { "epoch": 45.0, "grad_norm": 28.20340347290039, "learning_rate": 3.058666666666667e-05, "loss": 0.9324, "step": 5625 }, { "epoch": 45.0, "eval_loss": 1.1157954931259155, "eval_map": 0.3896, "eval_map_50": 0.7391, "eval_map_75": 0.3775, "eval_map_Coverall": 0.6005, "eval_map_Face_Shield": 0.4943, "eval_map_Gloves": 0.3041, "eval_map_Goggles": 0.1584, "eval_map_Mask": 0.3909, "eval_map_large": 0.6047, "eval_map_medium": 0.2787, "eval_map_small": 0.2598, "eval_mar_1": 0.3082, "eval_mar_10": 0.543, "eval_mar_100": 0.5607, "eval_mar_100_Coverall": 0.7356, "eval_mar_100_Face_Shield": 0.6824, 
"eval_mar_100_Gloves": 0.4344, "eval_mar_100_Goggles": 0.4531, "eval_mar_100_Mask": 0.4981, "eval_mar_large": 0.7318, "eval_mar_medium": 0.4359, "eval_mar_small": 0.3423, "eval_runtime": 0.9394, "eval_samples_per_second": 30.87, "eval_steps_per_second": 2.129, "step": 5625 }, { "epoch": 45.008, "grad_norm": 43.66697692871094, "learning_rate": 3.0582222222222225e-05, "loss": 1.4256, "step": 5626 }, { "epoch": 45.016, "grad_norm": 20.255876541137695, "learning_rate": 3.057777777777778e-05, "loss": 0.8555, "step": 5627 }, { "epoch": 45.024, "grad_norm": 13.585939407348633, "learning_rate": 3.0573333333333334e-05, "loss": 0.9852, "step": 5628 }, { "epoch": 45.032, "grad_norm": 38.71426773071289, "learning_rate": 3.056888888888889e-05, "loss": 0.9885, "step": 5629 }, { "epoch": 45.04, "grad_norm": 18.010263442993164, "learning_rate": 3.0564444444444444e-05, "loss": 0.7961, "step": 5630 }, { "epoch": 45.048, "grad_norm": 23.15908432006836, "learning_rate": 3.056e-05, "loss": 0.8926, "step": 5631 }, { "epoch": 45.056, "grad_norm": 28.274072647094727, "learning_rate": 3.055555555555556e-05, "loss": 1.4362, "step": 5632 }, { "epoch": 45.064, "grad_norm": 24.903684616088867, "learning_rate": 3.0551111111111115e-05, "loss": 1.4555, "step": 5633 }, { "epoch": 45.072, "grad_norm": 38.475826263427734, "learning_rate": 3.054666666666667e-05, "loss": 0.995, "step": 5634 }, { "epoch": 45.08, "grad_norm": 27.714168548583984, "learning_rate": 3.0542222222222225e-05, "loss": 0.9233, "step": 5635 }, { "epoch": 45.088, "grad_norm": 21.633544921875, "learning_rate": 3.053777777777778e-05, "loss": 0.7954, "step": 5636 }, { "epoch": 45.096, "grad_norm": 28.786495208740234, "learning_rate": 3.0533333333333335e-05, "loss": 1.177, "step": 5637 }, { "epoch": 45.104, "grad_norm": 18.655136108398438, "learning_rate": 3.052888888888889e-05, "loss": 1.1568, "step": 5638 }, { "epoch": 45.112, "grad_norm": 36.212711334228516, "learning_rate": 3.0524444444444444e-05, "loss": 1.2226, "step": 5639 }, { 
"epoch": 45.12, "grad_norm": 18.245145797729492, "learning_rate": 3.0520000000000006e-05, "loss": 1.2966, "step": 5640 }, { "epoch": 45.128, "grad_norm": 56.609153747558594, "learning_rate": 3.0515555555555557e-05, "loss": 0.7915, "step": 5641 }, { "epoch": 45.136, "grad_norm": 24.099578857421875, "learning_rate": 3.0511111111111112e-05, "loss": 1.0532, "step": 5642 }, { "epoch": 45.144, "grad_norm": 31.604684829711914, "learning_rate": 3.0506666666666667e-05, "loss": 0.8488, "step": 5643 }, { "epoch": 45.152, "grad_norm": 35.655067443847656, "learning_rate": 3.0502222222222222e-05, "loss": 1.1347, "step": 5644 }, { "epoch": 45.16, "grad_norm": 22.352588653564453, "learning_rate": 3.049777777777778e-05, "loss": 0.8502, "step": 5645 }, { "epoch": 45.168, "grad_norm": 37.32448959350586, "learning_rate": 3.0493333333333335e-05, "loss": 0.9161, "step": 5646 }, { "epoch": 45.176, "grad_norm": 26.05480194091797, "learning_rate": 3.048888888888889e-05, "loss": 2.531, "step": 5647 }, { "epoch": 45.184, "grad_norm": 28.165935516357422, "learning_rate": 3.0484444444444445e-05, "loss": 1.2148, "step": 5648 }, { "epoch": 45.192, "grad_norm": 27.14253044128418, "learning_rate": 3.0480000000000003e-05, "loss": 1.2676, "step": 5649 }, { "epoch": 45.2, "grad_norm": 24.62065887451172, "learning_rate": 3.0475555555555558e-05, "loss": 1.2579, "step": 5650 }, { "epoch": 45.208, "grad_norm": 35.056278228759766, "learning_rate": 3.0471111111111113e-05, "loss": 0.9711, "step": 5651 }, { "epoch": 45.216, "grad_norm": 24.30632781982422, "learning_rate": 3.0466666666666664e-05, "loss": 0.9082, "step": 5652 }, { "epoch": 45.224, "grad_norm": 11.647074699401855, "learning_rate": 3.0462222222222226e-05, "loss": 0.9336, "step": 5653 }, { "epoch": 45.232, "grad_norm": 16.996530532836914, "learning_rate": 3.045777777777778e-05, "loss": 0.9356, "step": 5654 }, { "epoch": 45.24, "grad_norm": 36.441131591796875, "learning_rate": 3.0453333333333335e-05, "loss": 2.2074, "step": 5655 }, { "epoch": 
45.248, "grad_norm": 73.80376434326172, "learning_rate": 3.0448888888888887e-05, "loss": 0.8134, "step": 5656 }, { "epoch": 45.256, "grad_norm": 64.3006820678711, "learning_rate": 3.044444444444445e-05, "loss": 1.0362, "step": 5657 }, { "epoch": 45.264, "grad_norm": 21.435993194580078, "learning_rate": 3.0440000000000003e-05, "loss": 0.9295, "step": 5658 }, { "epoch": 45.272, "grad_norm": 35.02735137939453, "learning_rate": 3.0435555555555555e-05, "loss": 1.4375, "step": 5659 }, { "epoch": 45.28, "grad_norm": 32.5574836730957, "learning_rate": 3.043111111111111e-05, "loss": 1.043, "step": 5660 }, { "epoch": 45.288, "grad_norm": 72.25423431396484, "learning_rate": 3.042666666666667e-05, "loss": 0.792, "step": 5661 }, { "epoch": 45.296, "grad_norm": 20.815895080566406, "learning_rate": 3.0422222222222223e-05, "loss": 0.6045, "step": 5662 }, { "epoch": 45.304, "grad_norm": 22.632612228393555, "learning_rate": 3.0417777777777778e-05, "loss": 1.0624, "step": 5663 }, { "epoch": 45.312, "grad_norm": 13.40441608428955, "learning_rate": 3.0413333333333332e-05, "loss": 1.2439, "step": 5664 }, { "epoch": 45.32, "grad_norm": 35.513328552246094, "learning_rate": 3.040888888888889e-05, "loss": 1.4779, "step": 5665 }, { "epoch": 45.328, "grad_norm": 77.19896697998047, "learning_rate": 3.0404444444444445e-05, "loss": 0.9578, "step": 5666 }, { "epoch": 45.336, "grad_norm": 23.592622756958008, "learning_rate": 3.04e-05, "loss": 1.2196, "step": 5667 }, { "epoch": 45.344, "grad_norm": 33.365108489990234, "learning_rate": 3.0395555555555555e-05, "loss": 0.9152, "step": 5668 }, { "epoch": 45.352, "grad_norm": 17.777048110961914, "learning_rate": 3.0391111111111113e-05, "loss": 0.8378, "step": 5669 }, { "epoch": 45.36, "grad_norm": 20.182647705078125, "learning_rate": 3.0386666666666668e-05, "loss": 1.0263, "step": 5670 }, { "epoch": 45.368, "grad_norm": 25.67144203186035, "learning_rate": 3.0382222222222223e-05, "loss": 1.0459, "step": 5671 }, { "epoch": 45.376, "grad_norm": 
15.749159812927246, "learning_rate": 3.0377777777777778e-05, "loss": 1.2713, "step": 5672 }, { "epoch": 45.384, "grad_norm": 22.078386306762695, "learning_rate": 3.0373333333333336e-05, "loss": 1.1199, "step": 5673 }, { "epoch": 45.392, "grad_norm": 24.293811798095703, "learning_rate": 3.036888888888889e-05, "loss": 0.9355, "step": 5674 }, { "epoch": 45.4, "grad_norm": 26.25211524963379, "learning_rate": 3.0364444444444446e-05, "loss": 1.1342, "step": 5675 }, { "epoch": 45.408, "grad_norm": 28.882305145263672, "learning_rate": 3.036e-05, "loss": 0.8979, "step": 5676 }, { "epoch": 45.416, "grad_norm": 17.868501663208008, "learning_rate": 3.035555555555556e-05, "loss": 1.3271, "step": 5677 }, { "epoch": 45.424, "grad_norm": 15.579634666442871, "learning_rate": 3.0351111111111114e-05, "loss": 0.8487, "step": 5678 }, { "epoch": 45.432, "grad_norm": 37.935150146484375, "learning_rate": 3.034666666666667e-05, "loss": 1.1189, "step": 5679 }, { "epoch": 45.44, "grad_norm": 25.359722137451172, "learning_rate": 3.0342222222222223e-05, "loss": 0.9218, "step": 5680 }, { "epoch": 45.448, "grad_norm": 18.585622787475586, "learning_rate": 3.0337777777777782e-05, "loss": 0.8328, "step": 5681 }, { "epoch": 45.456, "grad_norm": 36.79863357543945, "learning_rate": 3.0333333333333337e-05, "loss": 1.0904, "step": 5682 }, { "epoch": 45.464, "grad_norm": 18.3190975189209, "learning_rate": 3.032888888888889e-05, "loss": 1.1196, "step": 5683 }, { "epoch": 45.472, "grad_norm": 16.531431198120117, "learning_rate": 3.0324444444444443e-05, "loss": 0.9822, "step": 5684 }, { "epoch": 45.48, "grad_norm": 16.785152435302734, "learning_rate": 3.0320000000000004e-05, "loss": 0.76, "step": 5685 }, { "epoch": 45.488, "grad_norm": 16.17903709411621, "learning_rate": 3.031555555555556e-05, "loss": 0.9177, "step": 5686 }, { "epoch": 45.496, "grad_norm": 23.679800033569336, "learning_rate": 3.031111111111111e-05, "loss": 0.8432, "step": 5687 }, { "epoch": 45.504, "grad_norm": 35.73164367675781, 
"learning_rate": 3.0306666666666666e-05, "loss": 1.0009, "step": 5688 }, { "epoch": 45.512, "grad_norm": 23.68637466430664, "learning_rate": 3.0302222222222227e-05, "loss": 0.9349, "step": 5689 }, { "epoch": 45.52, "grad_norm": 24.89667510986328, "learning_rate": 3.0297777777777782e-05, "loss": 1.1153, "step": 5690 }, { "epoch": 45.528, "grad_norm": 14.174250602722168, "learning_rate": 3.0293333333333334e-05, "loss": 0.9077, "step": 5691 }, { "epoch": 45.536, "grad_norm": 20.35660171508789, "learning_rate": 3.028888888888889e-05, "loss": 1.1994, "step": 5692 }, { "epoch": 45.544, "grad_norm": 87.04450225830078, "learning_rate": 3.028444444444445e-05, "loss": 0.9661, "step": 5693 }, { "epoch": 45.552, "grad_norm": 27.195341110229492, "learning_rate": 3.028e-05, "loss": 1.3181, "step": 5694 }, { "epoch": 45.56, "grad_norm": 24.615591049194336, "learning_rate": 3.0275555555555556e-05, "loss": 1.1623, "step": 5695 }, { "epoch": 45.568, "grad_norm": 34.78102493286133, "learning_rate": 3.027111111111111e-05, "loss": 1.0392, "step": 5696 }, { "epoch": 45.576, "grad_norm": 24.101093292236328, "learning_rate": 3.0266666666666666e-05, "loss": 1.0465, "step": 5697 }, { "epoch": 45.584, "grad_norm": 41.50400924682617, "learning_rate": 3.0262222222222224e-05, "loss": 1.129, "step": 5698 }, { "epoch": 45.592, "grad_norm": 24.588546752929688, "learning_rate": 3.025777777777778e-05, "loss": 1.1602, "step": 5699 }, { "epoch": 45.6, "grad_norm": 22.553634643554688, "learning_rate": 3.0253333333333334e-05, "loss": 1.1217, "step": 5700 }, { "epoch": 45.608, "grad_norm": 23.97455406188965, "learning_rate": 3.024888888888889e-05, "loss": 0.9478, "step": 5701 }, { "epoch": 45.616, "grad_norm": 171.9444122314453, "learning_rate": 3.0244444444444447e-05, "loss": 2.1906, "step": 5702 }, { "epoch": 45.624, "grad_norm": 34.609039306640625, "learning_rate": 3.0240000000000002e-05, "loss": 0.9164, "step": 5703 }, { "epoch": 45.632, "grad_norm": 25.375577926635742, "learning_rate": 
3.0235555555555557e-05, "loss": 1.2859, "step": 5704 }, { "epoch": 45.64, "grad_norm": 30.997562408447266, "learning_rate": 3.023111111111111e-05, "loss": 1.1346, "step": 5705 }, { "epoch": 45.648, "grad_norm": 27.88825035095215, "learning_rate": 3.022666666666667e-05, "loss": 0.8793, "step": 5706 }, { "epoch": 45.656, "grad_norm": 16.93071746826172, "learning_rate": 3.0222222222222225e-05, "loss": 1.1235, "step": 5707 }, { "epoch": 45.664, "grad_norm": 34.30204772949219, "learning_rate": 3.021777777777778e-05, "loss": 1.2656, "step": 5708 }, { "epoch": 45.672, "grad_norm": 24.938077926635742, "learning_rate": 3.021333333333333e-05, "loss": 1.6571, "step": 5709 }, { "epoch": 45.68, "grad_norm": 37.61969757080078, "learning_rate": 3.0208888888888893e-05, "loss": 0.8014, "step": 5710 }, { "epoch": 45.688, "grad_norm": 28.099483489990234, "learning_rate": 3.0204444444444447e-05, "loss": 0.8029, "step": 5711 }, { "epoch": 45.696, "grad_norm": 20.988126754760742, "learning_rate": 3.02e-05, "loss": 1.1167, "step": 5712 }, { "epoch": 45.704, "grad_norm": 18.60327911376953, "learning_rate": 3.0195555555555554e-05, "loss": 0.7741, "step": 5713 }, { "epoch": 45.712, "grad_norm": 23.389339447021484, "learning_rate": 3.0191111111111115e-05, "loss": 1.0831, "step": 5714 }, { "epoch": 45.72, "grad_norm": 51.96977996826172, "learning_rate": 3.018666666666667e-05, "loss": 1.0224, "step": 5715 }, { "epoch": 45.728, "grad_norm": 22.5460262298584, "learning_rate": 3.018222222222222e-05, "loss": 1.0584, "step": 5716 }, { "epoch": 45.736, "grad_norm": 31.15022850036621, "learning_rate": 3.0177777777777776e-05, "loss": 1.8382, "step": 5717 }, { "epoch": 45.744, "grad_norm": 31.238811492919922, "learning_rate": 3.0173333333333338e-05, "loss": 0.8109, "step": 5718 }, { "epoch": 45.752, "grad_norm": 23.230342864990234, "learning_rate": 3.016888888888889e-05, "loss": 0.9017, "step": 5719 }, { "epoch": 45.76, "grad_norm": 50.42811965942383, "learning_rate": 3.0164444444444444e-05, "loss": 
0.9509, "step": 5720 }, { "epoch": 45.768, "grad_norm": 22.839874267578125, "learning_rate": 3.016e-05, "loss": 0.7669, "step": 5721 }, { "epoch": 45.776, "grad_norm": 18.74475860595703, "learning_rate": 3.0155555555555557e-05, "loss": 1.332, "step": 5722 }, { "epoch": 45.784, "grad_norm": 34.57772445678711, "learning_rate": 3.0151111111111112e-05, "loss": 0.8551, "step": 5723 }, { "epoch": 45.792, "grad_norm": 39.09449768066406, "learning_rate": 3.0146666666666667e-05, "loss": 1.1698, "step": 5724 }, { "epoch": 45.8, "grad_norm": 18.906578063964844, "learning_rate": 3.0142222222222222e-05, "loss": 0.9321, "step": 5725 }, { "epoch": 45.808, "grad_norm": 32.49904251098633, "learning_rate": 3.013777777777778e-05, "loss": 1.2411, "step": 5726 }, { "epoch": 45.816, "grad_norm": 15.387941360473633, "learning_rate": 3.0133333333333335e-05, "loss": 1.131, "step": 5727 }, { "epoch": 45.824, "grad_norm": 43.92325210571289, "learning_rate": 3.012888888888889e-05, "loss": 1.237, "step": 5728 }, { "epoch": 45.832, "grad_norm": 19.433616638183594, "learning_rate": 3.0124444444444445e-05, "loss": 1.4608, "step": 5729 }, { "epoch": 45.84, "grad_norm": 507.0796203613281, "learning_rate": 3.0120000000000003e-05, "loss": 1.0724, "step": 5730 }, { "epoch": 45.848, "grad_norm": 19.11219024658203, "learning_rate": 3.0115555555555558e-05, "loss": 1.0149, "step": 5731 }, { "epoch": 45.856, "grad_norm": 45.16251754760742, "learning_rate": 3.0111111111111113e-05, "loss": 1.0744, "step": 5732 }, { "epoch": 45.864, "grad_norm": 28.788684844970703, "learning_rate": 3.0106666666666668e-05, "loss": 1.2531, "step": 5733 }, { "epoch": 45.872, "grad_norm": 28.15081214904785, "learning_rate": 3.0102222222222226e-05, "loss": 0.82, "step": 5734 }, { "epoch": 45.88, "grad_norm": 24.872316360473633, "learning_rate": 3.009777777777778e-05, "loss": 0.7397, "step": 5735 }, { "epoch": 45.888, "grad_norm": 100.98406982421875, "learning_rate": 3.0093333333333335e-05, "loss": 1.0583, "step": 5736 }, { 
"epoch": 45.896, "grad_norm": 74.62702178955078, "learning_rate": 3.008888888888889e-05, "loss": 1.0947, "step": 5737 }, { "epoch": 45.904, "grad_norm": 16.80573081970215, "learning_rate": 3.008444444444445e-05, "loss": 1.0647, "step": 5738 }, { "epoch": 45.912, "grad_norm": 23.170019149780273, "learning_rate": 3.0080000000000003e-05, "loss": 1.0941, "step": 5739 }, { "epoch": 45.92, "grad_norm": 22.438129425048828, "learning_rate": 3.0075555555555558e-05, "loss": 0.7615, "step": 5740 }, { "epoch": 45.928, "grad_norm": 37.38450241088867, "learning_rate": 3.007111111111111e-05, "loss": 1.0083, "step": 5741 }, { "epoch": 45.936, "grad_norm": 26.467140197753906, "learning_rate": 3.006666666666667e-05, "loss": 1.4176, "step": 5742 }, { "epoch": 45.944, "grad_norm": 24.607059478759766, "learning_rate": 3.0062222222222226e-05, "loss": 1.0208, "step": 5743 }, { "epoch": 45.952, "grad_norm": 19.1025390625, "learning_rate": 3.0057777777777778e-05, "loss": 0.9914, "step": 5744 }, { "epoch": 45.96, "grad_norm": 21.532474517822266, "learning_rate": 3.0053333333333332e-05, "loss": 0.8553, "step": 5745 }, { "epoch": 45.968, "grad_norm": 28.789791107177734, "learning_rate": 3.0048888888888894e-05, "loss": 1.5617, "step": 5746 }, { "epoch": 45.976, "grad_norm": 17.55372428894043, "learning_rate": 3.004444444444445e-05, "loss": 1.1011, "step": 5747 }, { "epoch": 45.984, "grad_norm": 33.096458435058594, "learning_rate": 3.004e-05, "loss": 1.1505, "step": 5748 }, { "epoch": 45.992, "grad_norm": 49.19508743286133, "learning_rate": 3.0035555555555555e-05, "loss": 1.4554, "step": 5749 }, { "epoch": 46.0, "grad_norm": 19.875568389892578, "learning_rate": 3.003111111111111e-05, "loss": 1.0136, "step": 5750 }, { "epoch": 46.0, "eval_loss": 1.1011228561401367, "eval_map": 0.3802, "eval_map_50": 0.7183, "eval_map_75": 0.3588, "eval_map_Coverall": 0.5832, "eval_map_Face_Shield": 0.4756, "eval_map_Gloves": 0.2964, "eval_map_Goggles": 0.1918, "eval_map_Mask": 0.3539, "eval_map_large": 0.6054, 
"eval_map_medium": 0.2667, "eval_map_small": 0.2182, "eval_mar_1": 0.2907, "eval_mar_10": 0.507, "eval_mar_100": 0.5279, "eval_mar_100_Coverall": 0.7356, "eval_mar_100_Face_Shield": 0.6059, "eval_mar_100_Gloves": 0.4246, "eval_mar_100_Goggles": 0.4313, "eval_mar_100_Mask": 0.4423, "eval_mar_large": 0.7078, "eval_mar_medium": 0.3999, "eval_mar_small": 0.25, "eval_runtime": 0.9429, "eval_samples_per_second": 30.756, "eval_steps_per_second": 2.121, "step": 5750 }, { "epoch": 46.008, "grad_norm": 22.49710464477539, "learning_rate": 3.0026666666666668e-05, "loss": 1.0898, "step": 5751 }, { "epoch": 46.016, "grad_norm": 23.066883087158203, "learning_rate": 3.0022222222222223e-05, "loss": 1.1688, "step": 5752 }, { "epoch": 46.024, "grad_norm": 12.974044799804688, "learning_rate": 3.0017777777777778e-05, "loss": 0.9265, "step": 5753 }, { "epoch": 46.032, "grad_norm": 19.180124282836914, "learning_rate": 3.0013333333333333e-05, "loss": 0.7604, "step": 5754 }, { "epoch": 46.04, "grad_norm": 16.513864517211914, "learning_rate": 3.000888888888889e-05, "loss": 0.7749, "step": 5755 }, { "epoch": 46.048, "grad_norm": 137.4193572998047, "learning_rate": 3.0004444444444446e-05, "loss": 0.9463, "step": 5756 }, { "epoch": 46.056, "grad_norm": 15.93712043762207, "learning_rate": 3e-05, "loss": 0.9836, "step": 5757 }, { "epoch": 46.064, "grad_norm": 38.2397575378418, "learning_rate": 2.9995555555555556e-05, "loss": 0.9932, "step": 5758 }, { "epoch": 46.072, "grad_norm": 26.074256896972656, "learning_rate": 2.9991111111111114e-05, "loss": 0.9408, "step": 5759 }, { "epoch": 46.08, "grad_norm": 31.0207576751709, "learning_rate": 2.998666666666667e-05, "loss": 0.8403, "step": 5760 }, { "epoch": 46.088, "grad_norm": 23.76458168029785, "learning_rate": 2.9982222222222224e-05, "loss": 0.9792, "step": 5761 }, { "epoch": 46.096, "grad_norm": 35.49465560913086, "learning_rate": 2.997777777777778e-05, "loss": 0.8871, "step": 5762 }, { "epoch": 46.104, "grad_norm": 68.22129821777344, 
"learning_rate": 2.9973333333333337e-05, "loss": 0.9081, "step": 5763 }, { "epoch": 46.112, "grad_norm": 18.667755126953125, "learning_rate": 2.996888888888889e-05, "loss": 1.0031, "step": 5764 }, { "epoch": 46.12, "grad_norm": 186.62158203125, "learning_rate": 2.9964444444444446e-05, "loss": 1.8104, "step": 5765 }, { "epoch": 46.128, "grad_norm": 275.76043701171875, "learning_rate": 2.9959999999999998e-05, "loss": 1.1337, "step": 5766 }, { "epoch": 46.136, "grad_norm": 34.51411437988281, "learning_rate": 2.995555555555556e-05, "loss": 1.0264, "step": 5767 }, { "epoch": 46.144, "grad_norm": 24.71122932434082, "learning_rate": 2.9951111111111114e-05, "loss": 0.9599, "step": 5768 }, { "epoch": 46.152, "grad_norm": 23.73366355895996, "learning_rate": 2.9946666666666666e-05, "loss": 1.3108, "step": 5769 }, { "epoch": 46.16, "grad_norm": 30.761812210083008, "learning_rate": 2.994222222222222e-05, "loss": 1.9756, "step": 5770 }, { "epoch": 46.168, "grad_norm": 32.69639587402344, "learning_rate": 2.9937777777777782e-05, "loss": 1.1354, "step": 5771 }, { "epoch": 46.176, "grad_norm": 47.238853454589844, "learning_rate": 2.9933333333333337e-05, "loss": 1.8678, "step": 5772 }, { "epoch": 46.184, "grad_norm": 54.51129913330078, "learning_rate": 2.992888888888889e-05, "loss": 0.8834, "step": 5773 }, { "epoch": 46.192, "grad_norm": 23.910856246948242, "learning_rate": 2.9924444444444443e-05, "loss": 0.6846, "step": 5774 }, { "epoch": 46.2, "grad_norm": 23.694053649902344, "learning_rate": 2.9920000000000005e-05, "loss": 1.5218, "step": 5775 }, { "epoch": 46.208, "grad_norm": 24.30851173400879, "learning_rate": 2.9915555555555556e-05, "loss": 1.1915, "step": 5776 }, { "epoch": 46.216, "grad_norm": 18.382625579833984, "learning_rate": 2.991111111111111e-05, "loss": 1.0181, "step": 5777 }, { "epoch": 46.224, "grad_norm": 20.857112884521484, "learning_rate": 2.9906666666666666e-05, "loss": 1.1013, "step": 5778 }, { "epoch": 46.232, "grad_norm": 41.872013092041016, "learning_rate": 
2.9902222222222224e-05, "loss": 1.1034, "step": 5779 }, { "epoch": 46.24, "grad_norm": 14.041301727294922, "learning_rate": 2.989777777777778e-05, "loss": 1.1158, "step": 5780 }, { "epoch": 46.248, "grad_norm": 18.774267196655273, "learning_rate": 2.9893333333333334e-05, "loss": 1.0133, "step": 5781 }, { "epoch": 46.256, "grad_norm": 23.445266723632812, "learning_rate": 2.988888888888889e-05, "loss": 1.335, "step": 5782 }, { "epoch": 46.264, "grad_norm": 18.308488845825195, "learning_rate": 2.9884444444444447e-05, "loss": 0.8515, "step": 5783 }, { "epoch": 46.272, "grad_norm": 16.556121826171875, "learning_rate": 2.9880000000000002e-05, "loss": 1.3743, "step": 5784 }, { "epoch": 46.28, "grad_norm": 13.184361457824707, "learning_rate": 2.9875555555555557e-05, "loss": 0.7733, "step": 5785 }, { "epoch": 46.288, "grad_norm": 62.35004806518555, "learning_rate": 2.987111111111111e-05, "loss": 1.0858, "step": 5786 }, { "epoch": 46.296, "grad_norm": 17.277420043945312, "learning_rate": 2.986666666666667e-05, "loss": 1.1838, "step": 5787 }, { "epoch": 46.304, "grad_norm": 24.895381927490234, "learning_rate": 2.9862222222222225e-05, "loss": 1.0195, "step": 5788 }, { "epoch": 46.312, "grad_norm": 18.302488327026367, "learning_rate": 2.985777777777778e-05, "loss": 1.7121, "step": 5789 }, { "epoch": 46.32, "grad_norm": 27.225427627563477, "learning_rate": 2.9853333333333334e-05, "loss": 1.2134, "step": 5790 }, { "epoch": 46.328, "grad_norm": 16.012414932250977, "learning_rate": 2.9848888888888893e-05, "loss": 0.7236, "step": 5791 }, { "epoch": 46.336, "grad_norm": 22.99108123779297, "learning_rate": 2.9844444444444447e-05, "loss": 0.8087, "step": 5792 }, { "epoch": 46.344, "grad_norm": 24.050565719604492, "learning_rate": 2.9840000000000002e-05, "loss": 0.7378, "step": 5793 }, { "epoch": 46.352, "grad_norm": 20.577611923217773, "learning_rate": 2.9835555555555557e-05, "loss": 1.1439, "step": 5794 }, { "epoch": 46.36, "grad_norm": 24.20064926147461, "learning_rate": 
2.9831111111111115e-05, "loss": 1.1062, "step": 5795 }, { "epoch": 46.368, "grad_norm": 26.984750747680664, "learning_rate": 2.982666666666667e-05, "loss": 1.1172, "step": 5796 }, { "epoch": 46.376, "grad_norm": 19.65692138671875, "learning_rate": 2.9822222222222225e-05, "loss": 0.7727, "step": 5797 }, { "epoch": 46.384, "grad_norm": 17.1455135345459, "learning_rate": 2.9817777777777777e-05, "loss": 0.9682, "step": 5798 }, { "epoch": 46.392, "grad_norm": 17.407474517822266, "learning_rate": 2.981333333333333e-05, "loss": 1.1182, "step": 5799 }, { "epoch": 46.4, "grad_norm": 25.207738876342773, "learning_rate": 2.9808888888888893e-05, "loss": 1.1813, "step": 5800 }, { "epoch": 46.408, "grad_norm": 19.90096092224121, "learning_rate": 2.9804444444444444e-05, "loss": 0.7967, "step": 5801 }, { "epoch": 46.416, "grad_norm": 21.219852447509766, "learning_rate": 2.98e-05, "loss": 0.9832, "step": 5802 }, { "epoch": 46.424, "grad_norm": 52.1961555480957, "learning_rate": 2.9795555555555554e-05, "loss": 0.9781, "step": 5803 }, { "epoch": 46.432, "grad_norm": 62.663658142089844, "learning_rate": 2.9791111111111116e-05, "loss": 0.9225, "step": 5804 }, { "epoch": 46.44, "grad_norm": 23.273656845092773, "learning_rate": 2.9786666666666667e-05, "loss": 1.1072, "step": 5805 }, { "epoch": 46.448, "grad_norm": 27.09464454650879, "learning_rate": 2.9782222222222222e-05, "loss": 0.843, "step": 5806 }, { "epoch": 46.456, "grad_norm": 10.669219017028809, "learning_rate": 2.9777777777777777e-05, "loss": 1.0977, "step": 5807 }, { "epoch": 46.464, "grad_norm": 22.632335662841797, "learning_rate": 2.9773333333333335e-05, "loss": 0.9731, "step": 5808 }, { "epoch": 46.472, "grad_norm": 22.393306732177734, "learning_rate": 2.976888888888889e-05, "loss": 0.7531, "step": 5809 }, { "epoch": 46.48, "grad_norm": 19.134233474731445, "learning_rate": 2.9764444444444445e-05, "loss": 1.0585, "step": 5810 }, { "epoch": 46.488, "grad_norm": 27.637956619262695, "learning_rate": 2.976e-05, "loss": 1.4288, 
"step": 5811 }, { "epoch": 46.496, "grad_norm": 26.766496658325195, "learning_rate": 2.9755555555555558e-05, "loss": 0.9317, "step": 5812 }, { "epoch": 46.504, "grad_norm": 26.625486373901367, "learning_rate": 2.9751111111111113e-05, "loss": 1.1791, "step": 5813 }, { "epoch": 46.512, "grad_norm": 27.457489013671875, "learning_rate": 2.9746666666666668e-05, "loss": 1.4382, "step": 5814 }, { "epoch": 46.52, "grad_norm": 16.817306518554688, "learning_rate": 2.9742222222222222e-05, "loss": 1.2567, "step": 5815 }, { "epoch": 46.528, "grad_norm": 20.23746109008789, "learning_rate": 2.973777777777778e-05, "loss": 1.1984, "step": 5816 }, { "epoch": 46.536, "grad_norm": 184.09115600585938, "learning_rate": 2.9733333333333336e-05, "loss": 0.9936, "step": 5817 }, { "epoch": 46.544, "grad_norm": 48.81568908691406, "learning_rate": 2.972888888888889e-05, "loss": 2.0793, "step": 5818 }, { "epoch": 46.552, "grad_norm": 20.42127799987793, "learning_rate": 2.9724444444444445e-05, "loss": 1.1536, "step": 5819 }, { "epoch": 46.56, "grad_norm": 16.23607063293457, "learning_rate": 2.9720000000000003e-05, "loss": 0.9102, "step": 5820 }, { "epoch": 46.568, "grad_norm": 41.453372955322266, "learning_rate": 2.9715555555555558e-05, "loss": 0.8645, "step": 5821 }, { "epoch": 46.576, "grad_norm": 19.298120498657227, "learning_rate": 2.9711111111111113e-05, "loss": 0.9742, "step": 5822 }, { "epoch": 46.584, "grad_norm": 19.12432861328125, "learning_rate": 2.9706666666666665e-05, "loss": 0.9012, "step": 5823 }, { "epoch": 46.592, "grad_norm": 38.77570343017578, "learning_rate": 2.9702222222222226e-05, "loss": 1.5776, "step": 5824 }, { "epoch": 46.6, "grad_norm": 21.465473175048828, "learning_rate": 2.969777777777778e-05, "loss": 0.7574, "step": 5825 }, { "epoch": 46.608, "grad_norm": 18.689950942993164, "learning_rate": 2.9693333333333333e-05, "loss": 0.9075, "step": 5826 }, { "epoch": 46.616, "grad_norm": 30.115968704223633, "learning_rate": 2.9688888888888887e-05, "loss": 1.4492, "step": 5827 
}, { "epoch": 46.624, "grad_norm": 32.90153503417969, "learning_rate": 2.968444444444445e-05, "loss": 2.4266, "step": 5828 }, { "epoch": 46.632, "grad_norm": 21.382732391357422, "learning_rate": 2.9680000000000004e-05, "loss": 0.9875, "step": 5829 }, { "epoch": 46.64, "grad_norm": 24.533920288085938, "learning_rate": 2.9675555555555555e-05, "loss": 0.9507, "step": 5830 }, { "epoch": 46.648, "grad_norm": 20.564132690429688, "learning_rate": 2.967111111111111e-05, "loss": 0.9586, "step": 5831 }, { "epoch": 46.656, "grad_norm": 86.13766479492188, "learning_rate": 2.9666666666666672e-05, "loss": 1.0458, "step": 5832 }, { "epoch": 46.664, "grad_norm": 10.283857345581055, "learning_rate": 2.9662222222222223e-05, "loss": 1.2257, "step": 5833 }, { "epoch": 46.672, "grad_norm": 19.2333927154541, "learning_rate": 2.9657777777777778e-05, "loss": 0.8938, "step": 5834 }, { "epoch": 46.68, "grad_norm": 29.775815963745117, "learning_rate": 2.9653333333333333e-05, "loss": 1.6621, "step": 5835 }, { "epoch": 46.688, "grad_norm": 36.03990936279297, "learning_rate": 2.964888888888889e-05, "loss": 1.2903, "step": 5836 }, { "epoch": 46.696, "grad_norm": 16.533065795898438, "learning_rate": 2.9644444444444446e-05, "loss": 0.9582, "step": 5837 }, { "epoch": 46.704, "grad_norm": 23.583627700805664, "learning_rate": 2.964e-05, "loss": 1.046, "step": 5838 }, { "epoch": 46.712, "grad_norm": 17.79530143737793, "learning_rate": 2.9635555555555556e-05, "loss": 1.058, "step": 5839 }, { "epoch": 46.72, "grad_norm": 34.314903259277344, "learning_rate": 2.9631111111111114e-05, "loss": 0.9707, "step": 5840 }, { "epoch": 46.728, "grad_norm": 23.280988693237305, "learning_rate": 2.962666666666667e-05, "loss": 1.1459, "step": 5841 }, { "epoch": 46.736, "grad_norm": 12.452288627624512, "learning_rate": 2.9622222222222224e-05, "loss": 0.9015, "step": 5842 }, { "epoch": 46.744, "grad_norm": 22.932186126708984, "learning_rate": 2.961777777777778e-05, "loss": 1.0757, "step": 5843 }, { "epoch": 46.752, 
"grad_norm": 18.450740814208984, "learning_rate": 2.9613333333333337e-05, "loss": 0.9109, "step": 5844 }, { "epoch": 46.76, "grad_norm": 21.372007369995117, "learning_rate": 2.960888888888889e-05, "loss": 1.1523, "step": 5845 }, { "epoch": 46.768, "grad_norm": 11.892143249511719, "learning_rate": 2.9604444444444446e-05, "loss": 1.3338, "step": 5846 }, { "epoch": 46.776, "grad_norm": 24.529518127441406, "learning_rate": 2.96e-05, "loss": 1.3133, "step": 5847 }, { "epoch": 46.784, "grad_norm": 18.164682388305664, "learning_rate": 2.959555555555556e-05, "loss": 1.2606, "step": 5848 }, { "epoch": 46.792, "grad_norm": 17.992183685302734, "learning_rate": 2.9591111111111114e-05, "loss": 1.1729, "step": 5849 }, { "epoch": 46.8, "grad_norm": 24.076017379760742, "learning_rate": 2.958666666666667e-05, "loss": 0.9827, "step": 5850 }, { "epoch": 46.808, "grad_norm": 24.43817710876465, "learning_rate": 2.9582222222222224e-05, "loss": 1.02, "step": 5851 }, { "epoch": 46.816, "grad_norm": 21.754383087158203, "learning_rate": 2.9577777777777775e-05, "loss": 1.4066, "step": 5852 }, { "epoch": 46.824, "grad_norm": 16.553905487060547, "learning_rate": 2.9573333333333337e-05, "loss": 1.0781, "step": 5853 }, { "epoch": 46.832, "grad_norm": 25.599590301513672, "learning_rate": 2.9568888888888892e-05, "loss": 0.8516, "step": 5854 }, { "epoch": 46.84, "grad_norm": 17.02303695678711, "learning_rate": 2.9564444444444443e-05, "loss": 1.4851, "step": 5855 }, { "epoch": 46.848, "grad_norm": 25.603057861328125, "learning_rate": 2.9559999999999998e-05, "loss": 0.9199, "step": 5856 }, { "epoch": 46.856, "grad_norm": 24.43779182434082, "learning_rate": 2.955555555555556e-05, "loss": 0.8958, "step": 5857 }, { "epoch": 46.864, "grad_norm": 41.602745056152344, "learning_rate": 2.955111111111111e-05, "loss": 1.0292, "step": 5858 }, { "epoch": 46.872, "grad_norm": 15.866991996765137, "learning_rate": 2.9546666666666666e-05, "loss": 0.9139, "step": 5859 }, { "epoch": 46.88, "grad_norm": 
23.215560913085938, "learning_rate": 2.954222222222222e-05, "loss": 1.2289, "step": 5860 }, { "epoch": 46.888, "grad_norm": 21.447906494140625, "learning_rate": 2.953777777777778e-05, "loss": 1.3259, "step": 5861 }, { "epoch": 46.896, "grad_norm": 25.364240646362305, "learning_rate": 2.9533333333333334e-05, "loss": 1.0671, "step": 5862 }, { "epoch": 46.904, "grad_norm": 20.359189987182617, "learning_rate": 2.952888888888889e-05, "loss": 1.1588, "step": 5863 }, { "epoch": 46.912, "grad_norm": 15.955594062805176, "learning_rate": 2.9524444444444444e-05, "loss": 1.0164, "step": 5864 }, { "epoch": 46.92, "grad_norm": 92.01947021484375, "learning_rate": 2.9520000000000002e-05, "loss": 1.0596, "step": 5865 }, { "epoch": 46.928, "grad_norm": 41.360992431640625, "learning_rate": 2.9515555555555557e-05, "loss": 0.932, "step": 5866 }, { "epoch": 46.936, "grad_norm": 26.226558685302734, "learning_rate": 2.951111111111111e-05, "loss": 0.8507, "step": 5867 }, { "epoch": 46.944, "grad_norm": 19.29670524597168, "learning_rate": 2.9506666666666667e-05, "loss": 0.9982, "step": 5868 }, { "epoch": 46.952, "grad_norm": 12.881609916687012, "learning_rate": 2.9502222222222225e-05, "loss": 1.2505, "step": 5869 }, { "epoch": 46.96, "grad_norm": 38.57623291015625, "learning_rate": 2.949777777777778e-05, "loss": 1.6244, "step": 5870 }, { "epoch": 46.968, "grad_norm": 29.198787689208984, "learning_rate": 2.9493333333333334e-05, "loss": 0.9642, "step": 5871 }, { "epoch": 46.976, "grad_norm": 17.22748374938965, "learning_rate": 2.948888888888889e-05, "loss": 0.9572, "step": 5872 }, { "epoch": 46.984, "grad_norm": 47.536293029785156, "learning_rate": 2.9484444444444448e-05, "loss": 1.1598, "step": 5873 }, { "epoch": 46.992, "grad_norm": 100.15447235107422, "learning_rate": 2.9480000000000002e-05, "loss": 0.6992, "step": 5874 }, { "epoch": 47.0, "grad_norm": 15.242057800292969, "learning_rate": 2.9475555555555557e-05, "loss": 0.9357, "step": 5875 }, { "epoch": 47.0, "eval_loss": 
1.0537947416305542, "eval_map": 0.3982, "eval_map_50": 0.7609, "eval_map_75": 0.3641, "eval_map_Coverall": 0.6033, "eval_map_Face_Shield": 0.4601, "eval_map_Gloves": 0.3046, "eval_map_Goggles": 0.2374, "eval_map_Mask": 0.3853, "eval_map_large": 0.5698, "eval_map_medium": 0.274, "eval_map_small": 0.3344, "eval_mar_1": 0.315, "eval_mar_10": 0.5489, "eval_mar_100": 0.5612, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.4311, "eval_mar_100_Goggles": 0.4656, "eval_mar_100_Mask": 0.4673, "eval_mar_large": 0.6875, "eval_mar_medium": 0.4332, "eval_mar_small": 0.3761, "eval_runtime": 0.9112, "eval_samples_per_second": 31.826, "eval_steps_per_second": 2.195, "step": 5875 }, { "epoch": 47.008, "grad_norm": 40.40620422363281, "learning_rate": 2.9471111111111112e-05, "loss": 0.7975, "step": 5876 }, { "epoch": 47.016, "grad_norm": 27.827556610107422, "learning_rate": 2.946666666666667e-05, "loss": 1.1358, "step": 5877 }, { "epoch": 47.024, "grad_norm": 12.175166130065918, "learning_rate": 2.9462222222222225e-05, "loss": 1.4146, "step": 5878 }, { "epoch": 47.032, "grad_norm": 32.35296630859375, "learning_rate": 2.945777777777778e-05, "loss": 1.0803, "step": 5879 }, { "epoch": 47.04, "grad_norm": 27.87110710144043, "learning_rate": 2.945333333333333e-05, "loss": 0.9908, "step": 5880 }, { "epoch": 47.048, "grad_norm": 71.40385437011719, "learning_rate": 2.9448888888888893e-05, "loss": 1.3962, "step": 5881 }, { "epoch": 47.056, "grad_norm": 28.00090980529785, "learning_rate": 2.9444444444444448e-05, "loss": 0.805, "step": 5882 }, { "epoch": 47.064, "grad_norm": 16.010540008544922, "learning_rate": 2.944e-05, "loss": 1.1903, "step": 5883 }, { "epoch": 47.072, "grad_norm": 17.75156593322754, "learning_rate": 2.9435555555555554e-05, "loss": 0.8827, "step": 5884 }, { "epoch": 47.08, "grad_norm": 18.319610595703125, "learning_rate": 2.9431111111111116e-05, "loss": 1.0328, "step": 5885 }, { "epoch": 47.088, "grad_norm": 37.20132827758789, 
"learning_rate": 2.942666666666667e-05, "loss": 0.7239, "step": 5886 }, { "epoch": 47.096, "grad_norm": 22.137544631958008, "learning_rate": 2.9422222222222222e-05, "loss": 1.1742, "step": 5887 }, { "epoch": 47.104, "grad_norm": 22.83905029296875, "learning_rate": 2.9417777777777777e-05, "loss": 0.9853, "step": 5888 }, { "epoch": 47.112, "grad_norm": 27.427356719970703, "learning_rate": 2.941333333333334e-05, "loss": 1.0051, "step": 5889 }, { "epoch": 47.12, "grad_norm": 44.219688415527344, "learning_rate": 2.940888888888889e-05, "loss": 0.8945, "step": 5890 }, { "epoch": 47.128, "grad_norm": 106.57627868652344, "learning_rate": 2.9404444444444445e-05, "loss": 0.9988, "step": 5891 }, { "epoch": 47.136, "grad_norm": 22.47977638244629, "learning_rate": 2.94e-05, "loss": 0.8698, "step": 5892 }, { "epoch": 47.144, "grad_norm": 789.181884765625, "learning_rate": 2.9395555555555558e-05, "loss": 1.6253, "step": 5893 }, { "epoch": 47.152, "grad_norm": 17.37040901184082, "learning_rate": 2.9391111111111113e-05, "loss": 0.898, "step": 5894 }, { "epoch": 47.16, "grad_norm": 20.216365814208984, "learning_rate": 2.9386666666666668e-05, "loss": 1.2567, "step": 5895 }, { "epoch": 47.168, "grad_norm": 12.6611328125, "learning_rate": 2.9382222222222222e-05, "loss": 1.0042, "step": 5896 }, { "epoch": 47.176, "grad_norm": 24.654184341430664, "learning_rate": 2.937777777777778e-05, "loss": 0.644, "step": 5897 }, { "epoch": 47.184, "grad_norm": 33.490013122558594, "learning_rate": 2.9373333333333336e-05, "loss": 1.8233, "step": 5898 }, { "epoch": 47.192, "grad_norm": 65.99839782714844, "learning_rate": 2.936888888888889e-05, "loss": 1.4718, "step": 5899 }, { "epoch": 47.2, "grad_norm": 25.16878890991211, "learning_rate": 2.9364444444444445e-05, "loss": 1.7525, "step": 5900 }, { "epoch": 47.208, "grad_norm": 29.271141052246094, "learning_rate": 2.9360000000000003e-05, "loss": 0.8294, "step": 5901 }, { "epoch": 47.216, "grad_norm": 33.03450012207031, "learning_rate": 
2.935555555555556e-05, "loss": 1.1245, "step": 5902 }, { "epoch": 47.224, "grad_norm": 28.642187118530273, "learning_rate": 2.9351111111111113e-05, "loss": 1.1122, "step": 5903 }, { "epoch": 47.232, "grad_norm": 86.93534851074219, "learning_rate": 2.9346666666666668e-05, "loss": 1.1575, "step": 5904 }, { "epoch": 47.24, "grad_norm": 28.649654388427734, "learning_rate": 2.934222222222222e-05, "loss": 1.1039, "step": 5905 }, { "epoch": 47.248, "grad_norm": 40.37804412841797, "learning_rate": 2.933777777777778e-05, "loss": 0.8146, "step": 5906 }, { "epoch": 47.256, "grad_norm": 19.408029556274414, "learning_rate": 2.9333333333333336e-05, "loss": 0.8745, "step": 5907 }, { "epoch": 47.264, "grad_norm": 32.03013610839844, "learning_rate": 2.9328888888888887e-05, "loss": 0.9397, "step": 5908 }, { "epoch": 47.272, "grad_norm": 47.68890380859375, "learning_rate": 2.9324444444444442e-05, "loss": 1.3273, "step": 5909 }, { "epoch": 47.28, "grad_norm": 32.81391525268555, "learning_rate": 2.9320000000000004e-05, "loss": 1.1205, "step": 5910 }, { "epoch": 47.288, "grad_norm": 24.863719940185547, "learning_rate": 2.931555555555556e-05, "loss": 0.7164, "step": 5911 }, { "epoch": 47.296, "grad_norm": 23.30986213684082, "learning_rate": 2.931111111111111e-05, "loss": 1.151, "step": 5912 }, { "epoch": 47.304, "grad_norm": 23.614444732666016, "learning_rate": 2.9306666666666665e-05, "loss": 0.9415, "step": 5913 }, { "epoch": 47.312, "grad_norm": 68.56527709960938, "learning_rate": 2.9302222222222227e-05, "loss": 0.9689, "step": 5914 }, { "epoch": 47.32, "grad_norm": 42.897987365722656, "learning_rate": 2.9297777777777778e-05, "loss": 0.8416, "step": 5915 }, { "epoch": 47.328, "grad_norm": 20.24056625366211, "learning_rate": 2.9293333333333333e-05, "loss": 0.9582, "step": 5916 }, { "epoch": 47.336, "grad_norm": 27.728466033935547, "learning_rate": 2.9288888888888888e-05, "loss": 1.1259, "step": 5917 }, { "epoch": 47.344, "grad_norm": 19.049562454223633, "learning_rate": 
2.9284444444444446e-05, "loss": 0.8388, "step": 5918 }, { "epoch": 47.352, "grad_norm": 86.25849151611328, "learning_rate": 2.928e-05, "loss": 1.0352, "step": 5919 }, { "epoch": 47.36, "grad_norm": 35.892215728759766, "learning_rate": 2.9275555555555556e-05, "loss": 1.2056, "step": 5920 }, { "epoch": 47.368, "grad_norm": 29.556190490722656, "learning_rate": 2.927111111111111e-05, "loss": 1.1739, "step": 5921 }, { "epoch": 47.376, "grad_norm": 22.546329498291016, "learning_rate": 2.926666666666667e-05, "loss": 1.2413, "step": 5922 }, { "epoch": 47.384, "grad_norm": 327.4508056640625, "learning_rate": 2.9262222222222224e-05, "loss": 1.2351, "step": 5923 }, { "epoch": 47.392, "grad_norm": 26.446863174438477, "learning_rate": 2.925777777777778e-05, "loss": 0.7691, "step": 5924 }, { "epoch": 47.4, "grad_norm": 28.180288314819336, "learning_rate": 2.9253333333333333e-05, "loss": 1.3097, "step": 5925 }, { "epoch": 47.408, "grad_norm": 27.83700942993164, "learning_rate": 2.924888888888889e-05, "loss": 1.0453, "step": 5926 }, { "epoch": 47.416, "grad_norm": 33.09273147583008, "learning_rate": 2.9244444444444446e-05, "loss": 1.0557, "step": 5927 }, { "epoch": 47.424, "grad_norm": 24.99190330505371, "learning_rate": 2.924e-05, "loss": 1.5589, "step": 5928 }, { "epoch": 47.432, "grad_norm": 67.35566711425781, "learning_rate": 2.9235555555555556e-05, "loss": 1.0413, "step": 5929 }, { "epoch": 47.44, "grad_norm": 21.53581428527832, "learning_rate": 2.9231111111111114e-05, "loss": 1.7436, "step": 5930 }, { "epoch": 47.448, "grad_norm": 16.455520629882812, "learning_rate": 2.922666666666667e-05, "loss": 1.1589, "step": 5931 }, { "epoch": 47.456, "grad_norm": 49.57453918457031, "learning_rate": 2.9222222222222224e-05, "loss": 1.1686, "step": 5932 }, { "epoch": 47.464, "grad_norm": 27.896682739257812, "learning_rate": 2.921777777777778e-05, "loss": 0.8296, "step": 5933 }, { "epoch": 47.472, "grad_norm": 23.404226303100586, "learning_rate": 2.9213333333333337e-05, "loss": 1.4958, 
"step": 5934 }, { "epoch": 47.48, "grad_norm": 34.3045654296875, "learning_rate": 2.9208888888888892e-05, "loss": 1.1115, "step": 5935 }, { "epoch": 47.488, "grad_norm": 26.577190399169922, "learning_rate": 2.9204444444444447e-05, "loss": 1.0826, "step": 5936 }, { "epoch": 47.496, "grad_norm": 26.711833953857422, "learning_rate": 2.9199999999999998e-05, "loss": 0.8097, "step": 5937 }, { "epoch": 47.504, "grad_norm": 39.11137008666992, "learning_rate": 2.919555555555556e-05, "loss": 0.9809, "step": 5938 }, { "epoch": 47.512, "grad_norm": 31.692180633544922, "learning_rate": 2.9191111111111115e-05, "loss": 1.2252, "step": 5939 }, { "epoch": 47.52, "grad_norm": 69.8512191772461, "learning_rate": 2.9186666666666666e-05, "loss": 1.0709, "step": 5940 }, { "epoch": 47.528, "grad_norm": 19.749595642089844, "learning_rate": 2.918222222222222e-05, "loss": 0.9272, "step": 5941 }, { "epoch": 47.536, "grad_norm": 34.364707946777344, "learning_rate": 2.9177777777777783e-05, "loss": 0.8726, "step": 5942 }, { "epoch": 47.544, "grad_norm": 67.01687622070312, "learning_rate": 2.9173333333333337e-05, "loss": 1.1952, "step": 5943 }, { "epoch": 47.552, "grad_norm": 30.380069732666016, "learning_rate": 2.916888888888889e-05, "loss": 0.9827, "step": 5944 }, { "epoch": 47.56, "grad_norm": 30.096567153930664, "learning_rate": 2.9164444444444444e-05, "loss": 1.09, "step": 5945 }, { "epoch": 47.568, "grad_norm": 29.26629066467285, "learning_rate": 2.9160000000000005e-05, "loss": 0.826, "step": 5946 }, { "epoch": 47.576, "grad_norm": 21.247005462646484, "learning_rate": 2.9155555555555557e-05, "loss": 1.1612, "step": 5947 }, { "epoch": 47.584, "grad_norm": 15.649072647094727, "learning_rate": 2.9151111111111112e-05, "loss": 1.2119, "step": 5948 }, { "epoch": 47.592, "grad_norm": 31.112478256225586, "learning_rate": 2.9146666666666667e-05, "loss": 1.2447, "step": 5949 }, { "epoch": 47.6, "grad_norm": 49.311580657958984, "learning_rate": 2.9142222222222225e-05, "loss": 1.1683, "step": 5950 }, { 
"epoch": 47.608, "grad_norm": 52.0279541015625, "learning_rate": 2.913777777777778e-05, "loss": 1.3224, "step": 5951 }, { "epoch": 47.616, "grad_norm": 18.39969825744629, "learning_rate": 2.9133333333333334e-05, "loss": 1.064, "step": 5952 }, { "epoch": 47.624, "grad_norm": 26.307723999023438, "learning_rate": 2.912888888888889e-05, "loss": 0.8679, "step": 5953 }, { "epoch": 47.632, "grad_norm": 22.329853057861328, "learning_rate": 2.9124444444444444e-05, "loss": 0.9443, "step": 5954 }, { "epoch": 47.64, "grad_norm": 30.817211151123047, "learning_rate": 2.9120000000000002e-05, "loss": 1.0456, "step": 5955 }, { "epoch": 47.648, "grad_norm": 36.736671447753906, "learning_rate": 2.9115555555555557e-05, "loss": 1.6232, "step": 5956 }, { "epoch": 47.656, "grad_norm": 23.394290924072266, "learning_rate": 2.9111111111111112e-05, "loss": 1.3717, "step": 5957 }, { "epoch": 47.664, "grad_norm": 27.166872024536133, "learning_rate": 2.9106666666666667e-05, "loss": 2.5601, "step": 5958 }, { "epoch": 47.672, "grad_norm": 27.453325271606445, "learning_rate": 2.9102222222222225e-05, "loss": 0.9263, "step": 5959 }, { "epoch": 47.68, "grad_norm": 20.345569610595703, "learning_rate": 2.909777777777778e-05, "loss": 0.8606, "step": 5960 }, { "epoch": 47.688, "grad_norm": 36.54138946533203, "learning_rate": 2.9093333333333335e-05, "loss": 1.0805, "step": 5961 }, { "epoch": 47.696, "grad_norm": 44.93388366699219, "learning_rate": 2.9088888888888886e-05, "loss": 1.116, "step": 5962 }, { "epoch": 47.704, "grad_norm": 28.626142501831055, "learning_rate": 2.9084444444444448e-05, "loss": 1.7108, "step": 5963 }, { "epoch": 47.712, "grad_norm": 20.91286849975586, "learning_rate": 2.9080000000000003e-05, "loss": 0.8644, "step": 5964 }, { "epoch": 47.72, "grad_norm": 30.37653350830078, "learning_rate": 2.9075555555555554e-05, "loss": 1.286, "step": 5965 }, { "epoch": 47.728, "grad_norm": 22.976350784301758, "learning_rate": 2.907111111111111e-05, "loss": 1.1462, "step": 5966 }, { "epoch": 47.736, 
"grad_norm": 22.619739532470703, "learning_rate": 2.906666666666667e-05, "loss": 1.0956, "step": 5967 }, { "epoch": 47.744, "grad_norm": 17.28680992126465, "learning_rate": 2.9062222222222226e-05, "loss": 1.1479, "step": 5968 }, { "epoch": 47.752, "grad_norm": 20.693626403808594, "learning_rate": 2.9057777777777777e-05, "loss": 0.8453, "step": 5969 }, { "epoch": 47.76, "grad_norm": 19.559173583984375, "learning_rate": 2.9053333333333332e-05, "loss": 0.8925, "step": 5970 }, { "epoch": 47.768, "grad_norm": 50.06292724609375, "learning_rate": 2.9048888888888893e-05, "loss": 1.1577, "step": 5971 }, { "epoch": 47.776, "grad_norm": 30.093242645263672, "learning_rate": 2.9044444444444445e-05, "loss": 0.7342, "step": 5972 }, { "epoch": 47.784, "grad_norm": 43.51205825805664, "learning_rate": 2.904e-05, "loss": 0.9131, "step": 5973 }, { "epoch": 47.792, "grad_norm": 18.693553924560547, "learning_rate": 2.9035555555555555e-05, "loss": 1.2379, "step": 5974 }, { "epoch": 47.8, "grad_norm": 27.462312698364258, "learning_rate": 2.9031111111111113e-05, "loss": 1.1389, "step": 5975 }, { "epoch": 47.808, "grad_norm": 24.979839324951172, "learning_rate": 2.9026666666666668e-05, "loss": 1.4627, "step": 5976 }, { "epoch": 47.816, "grad_norm": 18.064393997192383, "learning_rate": 2.9022222222222223e-05, "loss": 0.9766, "step": 5977 }, { "epoch": 47.824, "grad_norm": 31.297910690307617, "learning_rate": 2.9017777777777777e-05, "loss": 1.3004, "step": 5978 }, { "epoch": 47.832, "grad_norm": 27.151002883911133, "learning_rate": 2.9013333333333336e-05, "loss": 1.0506, "step": 5979 }, { "epoch": 47.84, "grad_norm": 23.6104679107666, "learning_rate": 2.900888888888889e-05, "loss": 0.8686, "step": 5980 }, { "epoch": 47.848, "grad_norm": 40.60283660888672, "learning_rate": 2.9004444444444445e-05, "loss": 1.2577, "step": 5981 }, { "epoch": 47.856, "grad_norm": 22.58481216430664, "learning_rate": 2.9e-05, "loss": 2.5974, "step": 5982 }, { "epoch": 47.864, "grad_norm": 105.09416961669922, 
"learning_rate": 2.899555555555556e-05, "loss": 0.8143, "step": 5983 }, { "epoch": 47.872, "grad_norm": 20.945789337158203, "learning_rate": 2.8991111111111113e-05, "loss": 0.9058, "step": 5984 }, { "epoch": 47.88, "grad_norm": 40.210933685302734, "learning_rate": 2.8986666666666668e-05, "loss": 0.974, "step": 5985 }, { "epoch": 47.888, "grad_norm": 26.82903480529785, "learning_rate": 2.8982222222222223e-05, "loss": 1.1245, "step": 5986 }, { "epoch": 47.896, "grad_norm": 28.804336547851562, "learning_rate": 2.897777777777778e-05, "loss": 0.8986, "step": 5987 }, { "epoch": 47.904, "grad_norm": 30.089826583862305, "learning_rate": 2.8973333333333336e-05, "loss": 0.9464, "step": 5988 }, { "epoch": 47.912, "grad_norm": 27.73128890991211, "learning_rate": 2.896888888888889e-05, "loss": 0.9993, "step": 5989 }, { "epoch": 47.92, "grad_norm": 53.70109558105469, "learning_rate": 2.8964444444444446e-05, "loss": 0.9409, "step": 5990 }, { "epoch": 47.928, "grad_norm": 15.2620849609375, "learning_rate": 2.8960000000000004e-05, "loss": 1.3277, "step": 5991 }, { "epoch": 47.936, "grad_norm": 32.13606262207031, "learning_rate": 2.895555555555556e-05, "loss": 1.3217, "step": 5992 }, { "epoch": 47.944, "grad_norm": 50.0564079284668, "learning_rate": 2.8951111111111114e-05, "loss": 1.007, "step": 5993 }, { "epoch": 47.952, "grad_norm": 11.576311111450195, "learning_rate": 2.8946666666666665e-05, "loss": 0.6638, "step": 5994 }, { "epoch": 47.96, "grad_norm": 13.814915657043457, "learning_rate": 2.8942222222222227e-05, "loss": 0.877, "step": 5995 }, { "epoch": 47.968, "grad_norm": 28.151914596557617, "learning_rate": 2.893777777777778e-05, "loss": 1.2305, "step": 5996 }, { "epoch": 47.976, "grad_norm": 20.449316024780273, "learning_rate": 2.8933333333333333e-05, "loss": 1.701, "step": 5997 }, { "epoch": 47.984, "grad_norm": 59.315067291259766, "learning_rate": 2.8928888888888888e-05, "loss": 0.979, "step": 5998 }, { "epoch": 47.992, "grad_norm": 42.49325942993164, "learning_rate": 
2.892444444444445e-05, "loss": 0.5091, "step": 5999 }, { "epoch": 48.0, "grad_norm": 16.077316284179688, "learning_rate": 2.8920000000000004e-05, "loss": 0.9768, "step": 6000 }, { "epoch": 48.0, "eval_loss": 1.1037623882293701, "eval_map": 0.3695, "eval_map_50": 0.7187, "eval_map_75": 0.3331, "eval_map_Coverall": 0.562, "eval_map_Face_Shield": 0.4429, "eval_map_Gloves": 0.3047, "eval_map_Goggles": 0.1927, "eval_map_Mask": 0.345, "eval_map_large": 0.5493, "eval_map_medium": 0.2415, "eval_map_small": 0.3368, "eval_mar_1": 0.305, "eval_mar_10": 0.5232, "eval_mar_100": 0.5326, "eval_mar_100_Coverall": 0.7111, "eval_mar_100_Face_Shield": 0.6941, "eval_mar_100_Gloves": 0.4049, "eval_mar_100_Goggles": 0.4125, "eval_mar_100_Mask": 0.4404, "eval_mar_large": 0.6798, "eval_mar_medium": 0.3856, "eval_mar_small": 0.4136, "eval_runtime": 0.9294, "eval_samples_per_second": 31.202, "eval_steps_per_second": 2.152, "step": 6000 }, { "epoch": 48.008, "grad_norm": 18.008211135864258, "learning_rate": 2.8915555555555556e-05, "loss": 1.3056, "step": 6001 }, { "epoch": 48.016, "grad_norm": 18.77232551574707, "learning_rate": 2.891111111111111e-05, "loss": 1.1552, "step": 6002 }, { "epoch": 48.024, "grad_norm": 42.567543029785156, "learning_rate": 2.8906666666666672e-05, "loss": 2.6693, "step": 6003 }, { "epoch": 48.032, "grad_norm": 122.87769317626953, "learning_rate": 2.8902222222222224e-05, "loss": 0.7317, "step": 6004 }, { "epoch": 48.04, "grad_norm": 30.271127700805664, "learning_rate": 2.889777777777778e-05, "loss": 1.1892, "step": 6005 }, { "epoch": 48.048, "grad_norm": 30.89764404296875, "learning_rate": 2.8893333333333333e-05, "loss": 1.1729, "step": 6006 }, { "epoch": 48.056, "grad_norm": 16.84925651550293, "learning_rate": 2.8888888888888888e-05, "loss": 1.0476, "step": 6007 }, { "epoch": 48.064, "grad_norm": 22.066381454467773, "learning_rate": 2.8884444444444446e-05, "loss": 0.8197, "step": 6008 }, { "epoch": 48.072, "grad_norm": 21.827960968017578, "learning_rate": 
2.888e-05, "loss": 1.2978, "step": 6009 }, { "epoch": 48.08, "grad_norm": 37.05769729614258, "learning_rate": 2.8875555555555556e-05, "loss": 0.7659, "step": 6010 }, { "epoch": 48.088, "grad_norm": 28.364900588989258, "learning_rate": 2.887111111111111e-05, "loss": 1.5761, "step": 6011 }, { "epoch": 48.096, "grad_norm": 15.306497573852539, "learning_rate": 2.886666666666667e-05, "loss": 1.1164, "step": 6012 }, { "epoch": 48.104, "grad_norm": 14.337142944335938, "learning_rate": 2.8862222222222224e-05, "loss": 0.8745, "step": 6013 }, { "epoch": 48.112, "grad_norm": 26.641008377075195, "learning_rate": 2.885777777777778e-05, "loss": 1.008, "step": 6014 }, { "epoch": 48.12, "grad_norm": 26.141206741333008, "learning_rate": 2.8853333333333334e-05, "loss": 0.753, "step": 6015 }, { "epoch": 48.128, "grad_norm": 33.35214614868164, "learning_rate": 2.8848888888888892e-05, "loss": 1.1883, "step": 6016 }, { "epoch": 48.136, "grad_norm": 24.51338768005371, "learning_rate": 2.8844444444444447e-05, "loss": 1.0974, "step": 6017 }, { "epoch": 48.144, "grad_norm": 27.41817283630371, "learning_rate": 2.8840000000000002e-05, "loss": 1.1865, "step": 6018 }, { "epoch": 48.152, "grad_norm": 61.31876754760742, "learning_rate": 2.8835555555555553e-05, "loss": 0.7971, "step": 6019 }, { "epoch": 48.16, "grad_norm": 17.983863830566406, "learning_rate": 2.8831111111111115e-05, "loss": 1.2428, "step": 6020 }, { "epoch": 48.168, "grad_norm": 30.58599090576172, "learning_rate": 2.882666666666667e-05, "loss": 0.8895, "step": 6021 }, { "epoch": 48.176, "grad_norm": 25.337997436523438, "learning_rate": 2.882222222222222e-05, "loss": 1.0411, "step": 6022 }, { "epoch": 48.184, "grad_norm": 17.781309127807617, "learning_rate": 2.8817777777777776e-05, "loss": 1.0866, "step": 6023 }, { "epoch": 48.192, "grad_norm": 39.53707504272461, "learning_rate": 2.8813333333333338e-05, "loss": 0.7472, "step": 6024 }, { "epoch": 48.2, "grad_norm": 18.4449520111084, "learning_rate": 2.8808888888888892e-05, "loss": 
0.9344, "step": 6025 }, { "epoch": 48.208, "grad_norm": 29.78488540649414, "learning_rate": 2.8804444444444444e-05, "loss": 1.2589, "step": 6026 }, { "epoch": 48.216, "grad_norm": 16.802736282348633, "learning_rate": 2.88e-05, "loss": 1.2205, "step": 6027 }, { "epoch": 48.224, "grad_norm": 23.850605010986328, "learning_rate": 2.879555555555556e-05, "loss": 0.8887, "step": 6028 }, { "epoch": 48.232, "grad_norm": 14.789505958557129, "learning_rate": 2.8791111111111112e-05, "loss": 0.8579, "step": 6029 }, { "epoch": 48.24, "grad_norm": 16.06593132019043, "learning_rate": 2.8786666666666667e-05, "loss": 0.9913, "step": 6030 }, { "epoch": 48.248, "grad_norm": 18.499929428100586, "learning_rate": 2.878222222222222e-05, "loss": 0.9486, "step": 6031 }, { "epoch": 48.256, "grad_norm": 36.451934814453125, "learning_rate": 2.877777777777778e-05, "loss": 1.7558, "step": 6032 }, { "epoch": 48.264, "grad_norm": 29.824262619018555, "learning_rate": 2.8773333333333335e-05, "loss": 1.0149, "step": 6033 }, { "epoch": 48.272, "grad_norm": 23.559314727783203, "learning_rate": 2.876888888888889e-05, "loss": 1.1066, "step": 6034 }, { "epoch": 48.28, "grad_norm": 27.796485900878906, "learning_rate": 2.8764444444444444e-05, "loss": 0.9884, "step": 6035 }, { "epoch": 48.288, "grad_norm": 32.540287017822266, "learning_rate": 2.8760000000000002e-05, "loss": 0.6915, "step": 6036 }, { "epoch": 48.296, "grad_norm": 29.955673217773438, "learning_rate": 2.8755555555555557e-05, "loss": 1.308, "step": 6037 }, { "epoch": 48.304, "grad_norm": 252.15438842773438, "learning_rate": 2.8751111111111112e-05, "loss": 0.8376, "step": 6038 }, { "epoch": 48.312, "grad_norm": 15.535889625549316, "learning_rate": 2.8746666666666667e-05, "loss": 1.1842, "step": 6039 }, { "epoch": 48.32, "grad_norm": 28.336109161376953, "learning_rate": 2.8742222222222225e-05, "loss": 1.1307, "step": 6040 }, { "epoch": 48.328, "grad_norm": 23.87751007080078, "learning_rate": 2.873777777777778e-05, "loss": 1.1091, "step": 6041 }, { 
"epoch": 48.336, "grad_norm": 86.21673583984375, "learning_rate": 2.8733333333333335e-05, "loss": 1.6951, "step": 6042 }, { "epoch": 48.344, "grad_norm": 29.69703483581543, "learning_rate": 2.872888888888889e-05, "loss": 1.4011, "step": 6043 }, { "epoch": 48.352, "grad_norm": 23.0872859954834, "learning_rate": 2.8724444444444448e-05, "loss": 0.9643, "step": 6044 }, { "epoch": 48.36, "grad_norm": 34.73979949951172, "learning_rate": 2.8720000000000003e-05, "loss": 1.3586, "step": 6045 }, { "epoch": 48.368, "grad_norm": 17.62259864807129, "learning_rate": 2.8715555555555558e-05, "loss": 1.0122, "step": 6046 }, { "epoch": 48.376, "grad_norm": 46.3035888671875, "learning_rate": 2.8711111111111113e-05, "loss": 0.864, "step": 6047 }, { "epoch": 48.384, "grad_norm": 16.639375686645508, "learning_rate": 2.870666666666667e-05, "loss": 1.1732, "step": 6048 }, { "epoch": 48.392, "grad_norm": 26.99205207824707, "learning_rate": 2.8702222222222226e-05, "loss": 1.5565, "step": 6049 }, { "epoch": 48.4, "grad_norm": 16.635007858276367, "learning_rate": 2.869777777777778e-05, "loss": 1.6531, "step": 6050 }, { "epoch": 48.408, "grad_norm": 18.435773849487305, "learning_rate": 2.8693333333333332e-05, "loss": 1.1026, "step": 6051 }, { "epoch": 48.416, "grad_norm": 94.28723907470703, "learning_rate": 2.8688888888888894e-05, "loss": 0.8615, "step": 6052 }, { "epoch": 48.424, "grad_norm": 13.656746864318848, "learning_rate": 2.868444444444445e-05, "loss": 1.0409, "step": 6053 }, { "epoch": 48.432, "grad_norm": 26.67386817932129, "learning_rate": 2.868e-05, "loss": 0.8662, "step": 6054 }, { "epoch": 48.44, "grad_norm": 23.4913330078125, "learning_rate": 2.8675555555555555e-05, "loss": 0.9488, "step": 6055 }, { "epoch": 48.448, "grad_norm": 16.876062393188477, "learning_rate": 2.8671111111111116e-05, "loss": 0.9631, "step": 6056 }, { "epoch": 48.456, "grad_norm": 13.707362174987793, "learning_rate": 2.8666666666666668e-05, "loss": 0.9399, "step": 6057 }, { "epoch": 48.464, "grad_norm": 
41.74146270751953, "learning_rate": 2.8662222222222223e-05, "loss": 1.2244, "step": 6058 }, { "epoch": 48.472, "grad_norm": 28.875944137573242, "learning_rate": 2.8657777777777777e-05, "loss": 0.8033, "step": 6059 }, { "epoch": 48.48, "grad_norm": 41.0594482421875, "learning_rate": 2.8653333333333332e-05, "loss": 0.95, "step": 6060 }, { "epoch": 48.488, "grad_norm": 26.69072914123535, "learning_rate": 2.864888888888889e-05, "loss": 0.9711, "step": 6061 }, { "epoch": 48.496, "grad_norm": 26.927927017211914, "learning_rate": 2.8644444444444445e-05, "loss": 0.7892, "step": 6062 }, { "epoch": 48.504, "grad_norm": 41.31848907470703, "learning_rate": 2.864e-05, "loss": 1.5769, "step": 6063 }, { "epoch": 48.512, "grad_norm": 34.98904037475586, "learning_rate": 2.8635555555555555e-05, "loss": 0.8456, "step": 6064 }, { "epoch": 48.52, "grad_norm": 31.78536033630371, "learning_rate": 2.8631111111111113e-05, "loss": 1.2289, "step": 6065 }, { "epoch": 48.528, "grad_norm": 23.774288177490234, "learning_rate": 2.8626666666666668e-05, "loss": 1.213, "step": 6066 }, { "epoch": 48.536, "grad_norm": 20.505470275878906, "learning_rate": 2.8622222222222223e-05, "loss": 1.0812, "step": 6067 }, { "epoch": 48.544, "grad_norm": 25.833913803100586, "learning_rate": 2.8617777777777778e-05, "loss": 1.0406, "step": 6068 }, { "epoch": 48.552, "grad_norm": 17.844947814941406, "learning_rate": 2.8613333333333336e-05, "loss": 0.6418, "step": 6069 }, { "epoch": 48.56, "grad_norm": 27.539989471435547, "learning_rate": 2.860888888888889e-05, "loss": 1.1809, "step": 6070 }, { "epoch": 48.568, "grad_norm": 85.5663070678711, "learning_rate": 2.8604444444444446e-05, "loss": 2.6066, "step": 6071 }, { "epoch": 48.576, "grad_norm": 34.72932815551758, "learning_rate": 2.86e-05, "loss": 1.2326, "step": 6072 }, { "epoch": 48.584, "grad_norm": 30.603267669677734, "learning_rate": 2.859555555555556e-05, "loss": 1.0928, "step": 6073 }, { "epoch": 48.592, "grad_norm": 18.669952392578125, "learning_rate": 
2.8591111111111114e-05, "loss": 0.9073, "step": 6074 }, { "epoch": 48.6, "grad_norm": 16.23104476928711, "learning_rate": 2.858666666666667e-05, "loss": 0.8938, "step": 6075 }, { "epoch": 48.608, "grad_norm": 20.484525680541992, "learning_rate": 2.858222222222222e-05, "loss": 1.1381, "step": 6076 }, { "epoch": 48.616, "grad_norm": 23.105268478393555, "learning_rate": 2.857777777777778e-05, "loss": 0.9871, "step": 6077 }, { "epoch": 48.624, "grad_norm": 33.919036865234375, "learning_rate": 2.8573333333333336e-05, "loss": 1.6337, "step": 6078 }, { "epoch": 48.632, "grad_norm": 19.637338638305664, "learning_rate": 2.8568888888888888e-05, "loss": 1.0969, "step": 6079 }, { "epoch": 48.64, "grad_norm": 15.651528358459473, "learning_rate": 2.8564444444444443e-05, "loss": 0.77, "step": 6080 }, { "epoch": 48.648, "grad_norm": 58.410011291503906, "learning_rate": 2.8560000000000004e-05, "loss": 0.6403, "step": 6081 }, { "epoch": 48.656, "grad_norm": 17.192678451538086, "learning_rate": 2.855555555555556e-05, "loss": 0.8648, "step": 6082 }, { "epoch": 48.664, "grad_norm": 21.169797897338867, "learning_rate": 2.855111111111111e-05, "loss": 1.1695, "step": 6083 }, { "epoch": 48.672, "grad_norm": 26.43241310119629, "learning_rate": 2.8546666666666666e-05, "loss": 0.9865, "step": 6084 }, { "epoch": 48.68, "grad_norm": 27.494831085205078, "learning_rate": 2.8542222222222227e-05, "loss": 0.9962, "step": 6085 }, { "epoch": 48.688, "grad_norm": 63.298973083496094, "learning_rate": 2.853777777777778e-05, "loss": 0.8966, "step": 6086 }, { "epoch": 48.696, "grad_norm": 53.85635757446289, "learning_rate": 2.8533333333333333e-05, "loss": 1.0496, "step": 6087 }, { "epoch": 48.704, "grad_norm": 33.407073974609375, "learning_rate": 2.8528888888888888e-05, "loss": 1.8521, "step": 6088 }, { "epoch": 48.712, "grad_norm": 17.26319694519043, "learning_rate": 2.8524444444444447e-05, "loss": 0.6833, "step": 6089 }, { "epoch": 48.72, "grad_norm": 34.39692306518555, "learning_rate": 2.852e-05, 
"loss": 1.2311, "step": 6090 }, { "epoch": 48.728, "grad_norm": 15.937834739685059, "learning_rate": 2.8515555555555556e-05, "loss": 0.7728, "step": 6091 }, { "epoch": 48.736, "grad_norm": 31.379302978515625, "learning_rate": 2.851111111111111e-05, "loss": 1.0545, "step": 6092 }, { "epoch": 48.744, "grad_norm": 23.47063636779785, "learning_rate": 2.850666666666667e-05, "loss": 1.0914, "step": 6093 }, { "epoch": 48.752, "grad_norm": 27.01911163330078, "learning_rate": 2.8502222222222224e-05, "loss": 0.9424, "step": 6094 }, { "epoch": 48.76, "grad_norm": 30.240848541259766, "learning_rate": 2.849777777777778e-05, "loss": 0.8055, "step": 6095 }, { "epoch": 48.768, "grad_norm": 37.747676849365234, "learning_rate": 2.8493333333333334e-05, "loss": 1.101, "step": 6096 }, { "epoch": 48.776, "grad_norm": 104.75276947021484, "learning_rate": 2.8488888888888892e-05, "loss": 0.9926, "step": 6097 }, { "epoch": 48.784, "grad_norm": 42.57796096801758, "learning_rate": 2.8484444444444447e-05, "loss": 1.0835, "step": 6098 }, { "epoch": 48.792, "grad_norm": 18.953575134277344, "learning_rate": 2.8480000000000002e-05, "loss": 0.7463, "step": 6099 }, { "epoch": 48.8, "grad_norm": 58.443992614746094, "learning_rate": 2.8475555555555557e-05, "loss": 0.9866, "step": 6100 }, { "epoch": 48.808, "grad_norm": 18.742244720458984, "learning_rate": 2.8471111111111115e-05, "loss": 1.0491, "step": 6101 }, { "epoch": 48.816, "grad_norm": 30.572879791259766, "learning_rate": 2.846666666666667e-05, "loss": 1.1223, "step": 6102 }, { "epoch": 48.824, "grad_norm": 17.326250076293945, "learning_rate": 2.8462222222222225e-05, "loss": 1.0816, "step": 6103 }, { "epoch": 48.832, "grad_norm": 24.611488342285156, "learning_rate": 2.8457777777777776e-05, "loss": 1.0991, "step": 6104 }, { "epoch": 48.84, "grad_norm": 21.567031860351562, "learning_rate": 2.8453333333333338e-05, "loss": 0.9842, "step": 6105 }, { "epoch": 48.848, "grad_norm": 38.23756408691406, "learning_rate": 2.8448888888888892e-05, "loss": 
1.0309, "step": 6106 }, { "epoch": 48.856, "grad_norm": 36.580322265625, "learning_rate": 2.8444444444444447e-05, "loss": 1.3351, "step": 6107 }, { "epoch": 48.864, "grad_norm": 32.35950469970703, "learning_rate": 2.844e-05, "loss": 0.9148, "step": 6108 }, { "epoch": 48.872, "grad_norm": 28.186376571655273, "learning_rate": 2.8435555555555554e-05, "loss": 1.1, "step": 6109 }, { "epoch": 48.88, "grad_norm": 24.03380012512207, "learning_rate": 2.8431111111111115e-05, "loss": 0.8217, "step": 6110 }, { "epoch": 48.888, "grad_norm": 13.110459327697754, "learning_rate": 2.8426666666666667e-05, "loss": 0.9365, "step": 6111 }, { "epoch": 48.896, "grad_norm": 21.885833740234375, "learning_rate": 2.842222222222222e-05, "loss": 1.3878, "step": 6112 }, { "epoch": 48.904, "grad_norm": 22.36998748779297, "learning_rate": 2.8417777777777776e-05, "loss": 0.7717, "step": 6113 }, { "epoch": 48.912, "grad_norm": 30.651960372924805, "learning_rate": 2.8413333333333335e-05, "loss": 0.8601, "step": 6114 }, { "epoch": 48.92, "grad_norm": 19.6265926361084, "learning_rate": 2.840888888888889e-05, "loss": 1.0811, "step": 6115 }, { "epoch": 48.928, "grad_norm": 78.07643127441406, "learning_rate": 2.8404444444444444e-05, "loss": 1.1346, "step": 6116 }, { "epoch": 48.936, "grad_norm": 37.58867645263672, "learning_rate": 2.84e-05, "loss": 1.8732, "step": 6117 }, { "epoch": 48.944, "grad_norm": 20.482036590576172, "learning_rate": 2.8395555555555557e-05, "loss": 1.0066, "step": 6118 }, { "epoch": 48.952, "grad_norm": 28.306640625, "learning_rate": 2.8391111111111112e-05, "loss": 1.0472, "step": 6119 }, { "epoch": 48.96, "grad_norm": 34.252227783203125, "learning_rate": 2.8386666666666667e-05, "loss": 0.7307, "step": 6120 }, { "epoch": 48.968, "grad_norm": 28.350778579711914, "learning_rate": 2.8382222222222222e-05, "loss": 1.1109, "step": 6121 }, { "epoch": 48.976, "grad_norm": 45.10905456542969, "learning_rate": 2.837777777777778e-05, "loss": 0.6831, "step": 6122 }, { "epoch": 48.984, 
"grad_norm": 58.987483978271484, "learning_rate": 2.8373333333333335e-05, "loss": 1.1891, "step": 6123 }, { "epoch": 48.992, "grad_norm": 24.140625, "learning_rate": 2.836888888888889e-05, "loss": 0.8553, "step": 6124 }, { "epoch": 49.0, "grad_norm": 90.3128662109375, "learning_rate": 2.8364444444444445e-05, "loss": 1.3057, "step": 6125 }, { "epoch": 49.0, "eval_loss": 1.1276522874832153, "eval_map": 0.384, "eval_map_50": 0.743, "eval_map_75": 0.3196, "eval_map_Coverall": 0.5633, "eval_map_Face_Shield": 0.4409, "eval_map_Gloves": 0.3186, "eval_map_Goggles": 0.2043, "eval_map_Mask": 0.3931, "eval_map_large": 0.5392, "eval_map_medium": 0.2642, "eval_map_small": 0.3254, "eval_mar_1": 0.3128, "eval_mar_10": 0.5255, "eval_mar_100": 0.5385, "eval_mar_100_Coverall": 0.6978, "eval_mar_100_Face_Shield": 0.6647, "eval_mar_100_Gloves": 0.4197, "eval_mar_100_Goggles": 0.4375, "eval_mar_100_Mask": 0.4731, "eval_mar_large": 0.6569, "eval_mar_medium": 0.4123, "eval_mar_small": 0.3759, "eval_runtime": 0.9224, "eval_samples_per_second": 31.438, "eval_steps_per_second": 2.168, "step": 6125 }, { "epoch": 49.008, "grad_norm": 75.07881927490234, "learning_rate": 2.8360000000000003e-05, "loss": 2.4196, "step": 6126 }, { "epoch": 49.016, "grad_norm": 31.03387451171875, "learning_rate": 2.8355555555555558e-05, "loss": 1.3296, "step": 6127 }, { "epoch": 49.024, "grad_norm": 27.762224197387695, "learning_rate": 2.8351111111111113e-05, "loss": 1.2248, "step": 6128 }, { "epoch": 49.032, "grad_norm": 22.945005416870117, "learning_rate": 2.8346666666666667e-05, "loss": 0.746, "step": 6129 }, { "epoch": 49.04, "grad_norm": 22.528635025024414, "learning_rate": 2.8342222222222226e-05, "loss": 1.2285, "step": 6130 }, { "epoch": 49.048, "grad_norm": 25.142419815063477, "learning_rate": 2.833777777777778e-05, "loss": 1.59, "step": 6131 }, { "epoch": 49.056, "grad_norm": 25.1518497467041, "learning_rate": 2.8333333333333335e-05, "loss": 1.0932, "step": 6132 }, { "epoch": 49.064, "grad_norm": 
25.63279914855957, "learning_rate": 2.8328888888888887e-05, "loss": 0.9965, "step": 6133 }, { "epoch": 49.072, "grad_norm": 30.027551651000977, "learning_rate": 2.832444444444445e-05, "loss": 1.091, "step": 6134 }, { "epoch": 49.08, "grad_norm": 36.16604995727539, "learning_rate": 2.8320000000000003e-05, "loss": 1.1533, "step": 6135 }, { "epoch": 49.088, "grad_norm": 20.746976852416992, "learning_rate": 2.8315555555555555e-05, "loss": 1.0565, "step": 6136 }, { "epoch": 49.096, "grad_norm": 38.592899322509766, "learning_rate": 2.831111111111111e-05, "loss": 0.5667, "step": 6137 }, { "epoch": 49.104, "grad_norm": 58.78580856323242, "learning_rate": 2.830666666666667e-05, "loss": 1.0753, "step": 6138 }, { "epoch": 49.112, "grad_norm": 28.515718460083008, "learning_rate": 2.8302222222222226e-05, "loss": 0.813, "step": 6139 }, { "epoch": 49.12, "grad_norm": 16.25675392150879, "learning_rate": 2.8297777777777778e-05, "loss": 1.1261, "step": 6140 }, { "epoch": 49.128, "grad_norm": 18.48670196533203, "learning_rate": 2.8293333333333332e-05, "loss": 1.0727, "step": 6141 }, { "epoch": 49.136, "grad_norm": 21.773914337158203, "learning_rate": 2.8288888888888894e-05, "loss": 1.2348, "step": 6142 }, { "epoch": 49.144, "grad_norm": 22.93287467956543, "learning_rate": 2.8284444444444445e-05, "loss": 0.8491, "step": 6143 }, { "epoch": 49.152, "grad_norm": 16.534759521484375, "learning_rate": 2.828e-05, "loss": 0.9285, "step": 6144 }, { "epoch": 49.16, "grad_norm": 18.899438858032227, "learning_rate": 2.8275555555555555e-05, "loss": 1.3896, "step": 6145 }, { "epoch": 49.168, "grad_norm": 18.04861068725586, "learning_rate": 2.8271111111111113e-05, "loss": 0.907, "step": 6146 }, { "epoch": 49.176, "grad_norm": 28.453676223754883, "learning_rate": 2.8266666666666668e-05, "loss": 0.9049, "step": 6147 }, { "epoch": 49.184, "grad_norm": 78.93232727050781, "learning_rate": 2.8262222222222223e-05, "loss": 1.3529, "step": 6148 }, { "epoch": 49.192, "grad_norm": 19.097856521606445, 
"learning_rate": 2.8257777777777778e-05, "loss": 0.6299, "step": 6149 }, { "epoch": 49.2, "grad_norm": 37.02670669555664, "learning_rate": 2.8253333333333336e-05, "loss": 0.8375, "step": 6150 }, { "epoch": 49.208, "grad_norm": 38.20122146606445, "learning_rate": 2.824888888888889e-05, "loss": 1.0685, "step": 6151 }, { "epoch": 49.216, "grad_norm": 36.74368667602539, "learning_rate": 2.8244444444444446e-05, "loss": 1.1306, "step": 6152 }, { "epoch": 49.224, "grad_norm": 32.13734817504883, "learning_rate": 2.824e-05, "loss": 0.792, "step": 6153 }, { "epoch": 49.232, "grad_norm": 39.9112663269043, "learning_rate": 2.823555555555556e-05, "loss": 1.072, "step": 6154 }, { "epoch": 49.24, "grad_norm": 16.525604248046875, "learning_rate": 2.8231111111111114e-05, "loss": 1.0045, "step": 6155 }, { "epoch": 49.248, "grad_norm": 34.765140533447266, "learning_rate": 2.822666666666667e-05, "loss": 0.8337, "step": 6156 }, { "epoch": 49.256, "grad_norm": 39.790096282958984, "learning_rate": 2.8222222222222223e-05, "loss": 1.114, "step": 6157 }, { "epoch": 49.264, "grad_norm": 44.55694580078125, "learning_rate": 2.821777777777778e-05, "loss": 1.1159, "step": 6158 }, { "epoch": 49.272, "grad_norm": 15.163641929626465, "learning_rate": 2.8213333333333337e-05, "loss": 0.9382, "step": 6159 }, { "epoch": 49.28, "grad_norm": 19.873184204101562, "learning_rate": 2.820888888888889e-05, "loss": 0.9856, "step": 6160 }, { "epoch": 49.288, "grad_norm": 20.4200439453125, "learning_rate": 2.8204444444444443e-05, "loss": 0.9782, "step": 6161 }, { "epoch": 49.296, "grad_norm": 30.41004180908203, "learning_rate": 2.8199999999999998e-05, "loss": 1.2685, "step": 6162 }, { "epoch": 49.304, "grad_norm": 30.849857330322266, "learning_rate": 2.819555555555556e-05, "loss": 1.1491, "step": 6163 }, { "epoch": 49.312, "grad_norm": 35.38815689086914, "learning_rate": 2.8191111111111114e-05, "loss": 1.5501, "step": 6164 }, { "epoch": 49.32, "grad_norm": 46.882789611816406, "learning_rate": 
2.8186666666666666e-05, "loss": 1.0179, "step": 6165 }, { "epoch": 49.328, "grad_norm": 52.56270980834961, "learning_rate": 2.818222222222222e-05, "loss": 1.7316, "step": 6166 }, { "epoch": 49.336, "grad_norm": 25.97531509399414, "learning_rate": 2.8177777777777782e-05, "loss": 0.7061, "step": 6167 }, { "epoch": 49.344, "grad_norm": 27.180410385131836, "learning_rate": 2.8173333333333334e-05, "loss": 1.0019, "step": 6168 }, { "epoch": 49.352, "grad_norm": 25.932586669921875, "learning_rate": 2.816888888888889e-05, "loss": 1.1149, "step": 6169 }, { "epoch": 49.36, "grad_norm": 23.829936981201172, "learning_rate": 2.8164444444444443e-05, "loss": 0.8574, "step": 6170 }, { "epoch": 49.368, "grad_norm": 138.4601593017578, "learning_rate": 2.816e-05, "loss": 0.9366, "step": 6171 }, { "epoch": 49.376, "grad_norm": 65.06156158447266, "learning_rate": 2.8155555555555556e-05, "loss": 1.2456, "step": 6172 }, { "epoch": 49.384, "grad_norm": 22.087413787841797, "learning_rate": 2.815111111111111e-05, "loss": 0.8548, "step": 6173 }, { "epoch": 49.392, "grad_norm": 19.480484008789062, "learning_rate": 2.8146666666666666e-05, "loss": 0.7686, "step": 6174 }, { "epoch": 49.4, "grad_norm": 31.750411987304688, "learning_rate": 2.8142222222222224e-05, "loss": 0.7712, "step": 6175 }, { "epoch": 49.408, "grad_norm": 25.821292877197266, "learning_rate": 2.813777777777778e-05, "loss": 0.6629, "step": 6176 }, { "epoch": 49.416, "grad_norm": 31.22748374938965, "learning_rate": 2.8133333333333334e-05, "loss": 1.7015, "step": 6177 }, { "epoch": 49.424, "grad_norm": 16.59832000732422, "learning_rate": 2.812888888888889e-05, "loss": 0.7988, "step": 6178 }, { "epoch": 49.432, "grad_norm": 29.26931381225586, "learning_rate": 2.8124444444444447e-05, "loss": 1.0271, "step": 6179 }, { "epoch": 49.44, "grad_norm": 51.909297943115234, "learning_rate": 2.8120000000000002e-05, "loss": 2.366, "step": 6180 }, { "epoch": 49.448, "grad_norm": 20.753162384033203, "learning_rate": 2.8115555555555557e-05, 
"loss": 0.8318, "step": 6181 }, { "epoch": 49.456, "grad_norm": 32.951683044433594, "learning_rate": 2.811111111111111e-05, "loss": 0.7875, "step": 6182 }, { "epoch": 49.464, "grad_norm": 15.900911331176758, "learning_rate": 2.810666666666667e-05, "loss": 1.1871, "step": 6183 }, { "epoch": 49.472, "grad_norm": 42.40288543701172, "learning_rate": 2.8102222222222225e-05, "loss": 1.0538, "step": 6184 }, { "epoch": 49.48, "grad_norm": 23.952213287353516, "learning_rate": 2.809777777777778e-05, "loss": 1.2224, "step": 6185 }, { "epoch": 49.488, "grad_norm": 24.096843719482422, "learning_rate": 2.8093333333333334e-05, "loss": 1.1467, "step": 6186 }, { "epoch": 49.496, "grad_norm": 23.11887550354004, "learning_rate": 2.8088888888888893e-05, "loss": 1.2409, "step": 6187 }, { "epoch": 49.504, "grad_norm": 17.860097885131836, "learning_rate": 2.8084444444444447e-05, "loss": 1.0146, "step": 6188 }, { "epoch": 49.512, "grad_norm": 18.039892196655273, "learning_rate": 2.8080000000000002e-05, "loss": 1.2949, "step": 6189 }, { "epoch": 49.52, "grad_norm": 15.251280784606934, "learning_rate": 2.8075555555555554e-05, "loss": 0.6499, "step": 6190 }, { "epoch": 49.528, "grad_norm": 19.5567684173584, "learning_rate": 2.8071111111111115e-05, "loss": 0.8247, "step": 6191 }, { "epoch": 49.536, "grad_norm": 34.5927848815918, "learning_rate": 2.806666666666667e-05, "loss": 1.2926, "step": 6192 }, { "epoch": 49.544, "grad_norm": 23.441991806030273, "learning_rate": 2.806222222222222e-05, "loss": 1.1658, "step": 6193 }, { "epoch": 49.552, "grad_norm": 15.267474174499512, "learning_rate": 2.8057777777777776e-05, "loss": 0.7533, "step": 6194 }, { "epoch": 49.56, "grad_norm": 19.6038761138916, "learning_rate": 2.8053333333333338e-05, "loss": 1.1351, "step": 6195 }, { "epoch": 49.568, "grad_norm": 33.16184616088867, "learning_rate": 2.8048888888888893e-05, "loss": 1.0282, "step": 6196 }, { "epoch": 49.576, "grad_norm": 32.00270462036133, "learning_rate": 2.8044444444444444e-05, "loss": 0.882, 
"step": 6197 }, { "epoch": 49.584, "grad_norm": 53.150428771972656, "learning_rate": 2.804e-05, "loss": 1.1601, "step": 6198 }, { "epoch": 49.592, "grad_norm": 31.916025161743164, "learning_rate": 2.803555555555556e-05, "loss": 0.9367, "step": 6199 }, { "epoch": 49.6, "grad_norm": 34.69462585449219, "learning_rate": 2.8031111111111112e-05, "loss": 1.0028, "step": 6200 }, { "epoch": 49.608, "grad_norm": 29.753273010253906, "learning_rate": 2.8026666666666667e-05, "loss": 1.0181, "step": 6201 }, { "epoch": 49.616, "grad_norm": 25.261943817138672, "learning_rate": 2.8022222222222222e-05, "loss": 1.0181, "step": 6202 }, { "epoch": 49.624, "grad_norm": 14.609105110168457, "learning_rate": 2.801777777777778e-05, "loss": 1.302, "step": 6203 }, { "epoch": 49.632, "grad_norm": 31.764007568359375, "learning_rate": 2.8013333333333335e-05, "loss": 0.7376, "step": 6204 }, { "epoch": 49.64, "grad_norm": 31.280540466308594, "learning_rate": 2.800888888888889e-05, "loss": 1.1188, "step": 6205 }, { "epoch": 49.648, "grad_norm": 58.93777847290039, "learning_rate": 2.8004444444444445e-05, "loss": 1.6236, "step": 6206 }, { "epoch": 49.656, "grad_norm": 43.666603088378906, "learning_rate": 2.8000000000000003e-05, "loss": 0.9945, "step": 6207 }, { "epoch": 49.664, "grad_norm": 42.903865814208984, "learning_rate": 2.7995555555555558e-05, "loss": 0.8726, "step": 6208 }, { "epoch": 49.672, "grad_norm": 28.860706329345703, "learning_rate": 2.7991111111111113e-05, "loss": 1.0288, "step": 6209 }, { "epoch": 49.68, "grad_norm": 19.6319522857666, "learning_rate": 2.7986666666666668e-05, "loss": 2.4633, "step": 6210 }, { "epoch": 49.688, "grad_norm": 43.21915054321289, "learning_rate": 2.7982222222222226e-05, "loss": 1.2731, "step": 6211 }, { "epoch": 49.696, "grad_norm": 26.59419059753418, "learning_rate": 2.797777777777778e-05, "loss": 0.9095, "step": 6212 }, { "epoch": 49.704, "grad_norm": 58.10338592529297, "learning_rate": 2.7973333333333335e-05, "loss": 0.7756, "step": 6213 }, { "epoch": 
49.712, "grad_norm": 19.305648803710938, "learning_rate": 2.796888888888889e-05, "loss": 1.2263, "step": 6214 }, { "epoch": 49.72, "grad_norm": 35.21809768676758, "learning_rate": 2.7964444444444442e-05, "loss": 0.9845, "step": 6215 }, { "epoch": 49.728, "grad_norm": 24.614946365356445, "learning_rate": 2.7960000000000003e-05, "loss": 0.8807, "step": 6216 }, { "epoch": 49.736, "grad_norm": 28.227182388305664, "learning_rate": 2.7955555555555558e-05, "loss": 1.1796, "step": 6217 }, { "epoch": 49.744, "grad_norm": 30.980369567871094, "learning_rate": 2.795111111111111e-05, "loss": 2.5932, "step": 6218 }, { "epoch": 49.752, "grad_norm": 32.634273529052734, "learning_rate": 2.7946666666666664e-05, "loss": 0.9156, "step": 6219 }, { "epoch": 49.76, "grad_norm": 20.416406631469727, "learning_rate": 2.7942222222222226e-05, "loss": 0.8389, "step": 6220 }, { "epoch": 49.768, "grad_norm": 17.75328826904297, "learning_rate": 2.793777777777778e-05, "loss": 0.8369, "step": 6221 }, { "epoch": 49.776, "grad_norm": 37.13310241699219, "learning_rate": 2.7933333333333332e-05, "loss": 0.982, "step": 6222 }, { "epoch": 49.784, "grad_norm": 13.549261093139648, "learning_rate": 2.7928888888888887e-05, "loss": 0.8215, "step": 6223 }, { "epoch": 49.792, "grad_norm": 58.280128479003906, "learning_rate": 2.792444444444445e-05, "loss": 1.0221, "step": 6224 }, { "epoch": 49.8, "grad_norm": 22.80208396911621, "learning_rate": 2.792e-05, "loss": 1.3295, "step": 6225 }, { "epoch": 49.808, "grad_norm": 19.754846572875977, "learning_rate": 2.7915555555555555e-05, "loss": 1.0162, "step": 6226 }, { "epoch": 49.816, "grad_norm": 18.31835174560547, "learning_rate": 2.791111111111111e-05, "loss": 1.0389, "step": 6227 }, { "epoch": 49.824, "grad_norm": 29.58186149597168, "learning_rate": 2.7906666666666668e-05, "loss": 0.7604, "step": 6228 }, { "epoch": 49.832, "grad_norm": 37.447513580322266, "learning_rate": 2.7902222222222223e-05, "loss": 1.0461, "step": 6229 }, { "epoch": 49.84, "grad_norm": 
23.65291976928711, "learning_rate": 2.7897777777777778e-05, "loss": 1.6558, "step": 6230 }, { "epoch": 49.848, "grad_norm": 33.70512771606445, "learning_rate": 2.7893333333333333e-05, "loss": 1.1501, "step": 6231 }, { "epoch": 49.856, "grad_norm": 11.297999382019043, "learning_rate": 2.788888888888889e-05, "loss": 0.8972, "step": 6232 }, { "epoch": 49.864, "grad_norm": 19.15785026550293, "learning_rate": 2.7884444444444446e-05, "loss": 0.8595, "step": 6233 }, { "epoch": 49.872, "grad_norm": 17.651716232299805, "learning_rate": 2.788e-05, "loss": 1.0709, "step": 6234 }, { "epoch": 49.88, "grad_norm": 26.710914611816406, "learning_rate": 2.7875555555555556e-05, "loss": 0.8438, "step": 6235 }, { "epoch": 49.888, "grad_norm": 29.757909774780273, "learning_rate": 2.7871111111111114e-05, "loss": 0.8565, "step": 6236 }, { "epoch": 49.896, "grad_norm": 60.77159118652344, "learning_rate": 2.786666666666667e-05, "loss": 1.0187, "step": 6237 }, { "epoch": 49.904, "grad_norm": 50.512596130371094, "learning_rate": 2.7862222222222223e-05, "loss": 0.8664, "step": 6238 }, { "epoch": 49.912, "grad_norm": 17.02239990234375, "learning_rate": 2.785777777777778e-05, "loss": 0.5319, "step": 6239 }, { "epoch": 49.92, "grad_norm": 20.647756576538086, "learning_rate": 2.7853333333333337e-05, "loss": 1.5235, "step": 6240 }, { "epoch": 49.928, "grad_norm": 22.400707244873047, "learning_rate": 2.784888888888889e-05, "loss": 1.0912, "step": 6241 }, { "epoch": 49.936, "grad_norm": 33.56187438964844, "learning_rate": 2.7844444444444446e-05, "loss": 1.0571, "step": 6242 }, { "epoch": 49.944, "grad_norm": 24.5269832611084, "learning_rate": 2.7839999999999998e-05, "loss": 1.0859, "step": 6243 }, { "epoch": 49.952, "grad_norm": 21.577539443969727, "learning_rate": 2.783555555555556e-05, "loss": 1.0295, "step": 6244 }, { "epoch": 49.96, "grad_norm": 15.781170845031738, "learning_rate": 2.7831111111111114e-05, "loss": 1.2209, "step": 6245 }, { "epoch": 49.968, "grad_norm": 22.39644432067871, 
"learning_rate": 2.782666666666667e-05, "loss": 1.1111, "step": 6246 }, { "epoch": 49.976, "grad_norm": 20.92344856262207, "learning_rate": 2.782222222222222e-05, "loss": 0.8902, "step": 6247 }, { "epoch": 49.984, "grad_norm": 34.8471565246582, "learning_rate": 2.7817777777777782e-05, "loss": 1.1055, "step": 6248 }, { "epoch": 49.992, "grad_norm": 18.694412231445312, "learning_rate": 2.7813333333333337e-05, "loss": 0.9332, "step": 6249 }, { "epoch": 50.0, "grad_norm": 13.85278034210205, "learning_rate": 2.780888888888889e-05, "loss": 0.8473, "step": 6250 }, { "epoch": 50.0, "eval_loss": 1.1321284770965576, "eval_map": 0.3696, "eval_map_50": 0.7146, "eval_map_75": 0.3547, "eval_map_Coverall": 0.5914, "eval_map_Face_Shield": 0.3905, "eval_map_Gloves": 0.3041, "eval_map_Goggles": 0.2224, "eval_map_Mask": 0.3398, "eval_map_large": 0.5934, "eval_map_medium": 0.243, "eval_map_small": 0.2113, "eval_mar_1": 0.3012, "eval_mar_10": 0.5245, "eval_mar_100": 0.5477, "eval_mar_100_Coverall": 0.7133, "eval_mar_100_Face_Shield": 0.6706, "eval_mar_100_Gloves": 0.4098, "eval_mar_100_Goggles": 0.4969, "eval_mar_100_Mask": 0.4481, "eval_mar_large": 0.6926, "eval_mar_medium": 0.4166, "eval_mar_small": 0.3656, "eval_runtime": 0.9356, "eval_samples_per_second": 30.996, "eval_steps_per_second": 2.138, "step": 6250 }, { "epoch": 50.008, "grad_norm": 46.489158630371094, "learning_rate": 2.7804444444444443e-05, "loss": 1.084, "step": 6251 }, { "epoch": 50.016, "grad_norm": 77.03510284423828, "learning_rate": 2.7800000000000005e-05, "loss": 0.9113, "step": 6252 }, { "epoch": 50.024, "grad_norm": 31.14971160888672, "learning_rate": 2.7795555555555556e-05, "loss": 0.9135, "step": 6253 }, { "epoch": 50.032, "grad_norm": 24.604522705078125, "learning_rate": 2.779111111111111e-05, "loss": 0.7334, "step": 6254 }, { "epoch": 50.04, "grad_norm": 16.115951538085938, "learning_rate": 2.7786666666666666e-05, "loss": 2.168, "step": 6255 }, { "epoch": 50.048, "grad_norm": 35.92473602294922, 
"learning_rate": 2.7782222222222228e-05, "loss": 1.2142, "step": 6256 }, { "epoch": 50.056, "grad_norm": 24.777162551879883, "learning_rate": 2.777777777777778e-05, "loss": 0.9371, "step": 6257 }, { "epoch": 50.064, "grad_norm": 59.82781219482422, "learning_rate": 2.7773333333333334e-05, "loss": 0.9909, "step": 6258 }, { "epoch": 50.072, "grad_norm": 17.586048126220703, "learning_rate": 2.776888888888889e-05, "loss": 0.7096, "step": 6259 }, { "epoch": 50.08, "grad_norm": 397.6136169433594, "learning_rate": 2.7764444444444447e-05, "loss": 1.557, "step": 6260 }, { "epoch": 50.088, "grad_norm": 27.868942260742188, "learning_rate": 2.7760000000000002e-05, "loss": 1.2642, "step": 6261 }, { "epoch": 50.096, "grad_norm": 23.879606246948242, "learning_rate": 2.7755555555555557e-05, "loss": 0.9302, "step": 6262 }, { "epoch": 50.104, "grad_norm": 37.1677131652832, "learning_rate": 2.775111111111111e-05, "loss": 1.136, "step": 6263 }, { "epoch": 50.112, "grad_norm": 30.542251586914062, "learning_rate": 2.7746666666666666e-05, "loss": 0.8662, "step": 6264 }, { "epoch": 50.12, "grad_norm": 81.25465393066406, "learning_rate": 2.7742222222222225e-05, "loss": 0.9599, "step": 6265 }, { "epoch": 50.128, "grad_norm": 34.42457962036133, "learning_rate": 2.773777777777778e-05, "loss": 1.0966, "step": 6266 }, { "epoch": 50.136, "grad_norm": 28.559459686279297, "learning_rate": 2.7733333333333334e-05, "loss": 0.8537, "step": 6267 }, { "epoch": 50.144, "grad_norm": 32.293434143066406, "learning_rate": 2.772888888888889e-05, "loss": 0.8944, "step": 6268 }, { "epoch": 50.152, "grad_norm": 19.22408676147461, "learning_rate": 2.7724444444444447e-05, "loss": 0.8887, "step": 6269 }, { "epoch": 50.16, "grad_norm": 28.247196197509766, "learning_rate": 2.7720000000000002e-05, "loss": 0.8595, "step": 6270 }, { "epoch": 50.168, "grad_norm": 57.65912628173828, "learning_rate": 2.7715555555555557e-05, "loss": 1.0204, "step": 6271 }, { "epoch": 50.176, "grad_norm": 29.703332901000977, "learning_rate": 
2.771111111111111e-05, "loss": 1.835, "step": 6272 }, { "epoch": 50.184, "grad_norm": 29.032691955566406, "learning_rate": 2.770666666666667e-05, "loss": 1.015, "step": 6273 }, { "epoch": 50.192, "grad_norm": 21.922565460205078, "learning_rate": 2.7702222222222225e-05, "loss": 1.5172, "step": 6274 }, { "epoch": 50.2, "grad_norm": 26.193212509155273, "learning_rate": 2.7697777777777776e-05, "loss": 0.7292, "step": 6275 }, { "epoch": 50.208, "grad_norm": 23.48659324645996, "learning_rate": 2.769333333333333e-05, "loss": 0.8382, "step": 6276 }, { "epoch": 50.216, "grad_norm": 44.55563735961914, "learning_rate": 2.7688888888888893e-05, "loss": 1.2714, "step": 6277 }, { "epoch": 50.224, "grad_norm": 23.128629684448242, "learning_rate": 2.7684444444444448e-05, "loss": 0.8271, "step": 6278 }, { "epoch": 50.232, "grad_norm": 34.91636276245117, "learning_rate": 2.768e-05, "loss": 0.7917, "step": 6279 }, { "epoch": 50.24, "grad_norm": 22.921283721923828, "learning_rate": 2.7675555555555554e-05, "loss": 1.4186, "step": 6280 }, { "epoch": 50.248, "grad_norm": 19.032075881958008, "learning_rate": 2.7671111111111116e-05, "loss": 1.0942, "step": 6281 }, { "epoch": 50.256, "grad_norm": 66.55319213867188, "learning_rate": 2.7666666666666667e-05, "loss": 0.6877, "step": 6282 }, { "epoch": 50.264, "grad_norm": 54.21054458618164, "learning_rate": 2.7662222222222222e-05, "loss": 1.6552, "step": 6283 }, { "epoch": 50.272, "grad_norm": 50.492889404296875, "learning_rate": 2.7657777777777777e-05, "loss": 0.8258, "step": 6284 }, { "epoch": 50.28, "grad_norm": 19.634050369262695, "learning_rate": 2.7653333333333335e-05, "loss": 1.2736, "step": 6285 }, { "epoch": 50.288, "grad_norm": 41.35161209106445, "learning_rate": 2.764888888888889e-05, "loss": 1.5717, "step": 6286 }, { "epoch": 50.296, "grad_norm": 25.92150115966797, "learning_rate": 2.7644444444444445e-05, "loss": 1.9087, "step": 6287 }, { "epoch": 50.304, "grad_norm": 15.873639106750488, "learning_rate": 2.764e-05, "loss": 1.3606, 
"step": 6288 }, { "epoch": 50.312, "grad_norm": 25.840085983276367, "learning_rate": 2.7635555555555558e-05, "loss": 1.4312, "step": 6289 }, { "epoch": 50.32, "grad_norm": 40.56279373168945, "learning_rate": 2.7631111111111113e-05, "loss": 0.9533, "step": 6290 }, { "epoch": 50.328, "grad_norm": 41.77764129638672, "learning_rate": 2.7626666666666668e-05, "loss": 1.0717, "step": 6291 }, { "epoch": 50.336, "grad_norm": 43.50884246826172, "learning_rate": 2.7622222222222222e-05, "loss": 1.247, "step": 6292 }, { "epoch": 50.344, "grad_norm": 18.30068588256836, "learning_rate": 2.761777777777778e-05, "loss": 0.7966, "step": 6293 }, { "epoch": 50.352, "grad_norm": 56.62064743041992, "learning_rate": 2.7613333333333335e-05, "loss": 1.266, "step": 6294 }, { "epoch": 50.36, "grad_norm": 25.336984634399414, "learning_rate": 2.760888888888889e-05, "loss": 0.7616, "step": 6295 }, { "epoch": 50.368, "grad_norm": 33.45497512817383, "learning_rate": 2.7604444444444445e-05, "loss": 0.8852, "step": 6296 }, { "epoch": 50.376, "grad_norm": 18.878562927246094, "learning_rate": 2.7600000000000003e-05, "loss": 0.9658, "step": 6297 }, { "epoch": 50.384, "grad_norm": 48.76700210571289, "learning_rate": 2.7595555555555558e-05, "loss": 1.0818, "step": 6298 }, { "epoch": 50.392, "grad_norm": 55.45981979370117, "learning_rate": 2.7591111111111113e-05, "loss": 1.0685, "step": 6299 }, { "epoch": 50.4, "grad_norm": 35.583614349365234, "learning_rate": 2.7586666666666665e-05, "loss": 0.9729, "step": 6300 }, { "epoch": 50.408, "grad_norm": 22.602867126464844, "learning_rate": 2.7582222222222226e-05, "loss": 1.4453, "step": 6301 }, { "epoch": 50.416, "grad_norm": 30.340681076049805, "learning_rate": 2.757777777777778e-05, "loss": 1.0581, "step": 6302 }, { "epoch": 50.424, "grad_norm": 21.35394859313965, "learning_rate": 2.7573333333333336e-05, "loss": 1.3188, "step": 6303 }, { "epoch": 50.432, "grad_norm": 24.669204711914062, "learning_rate": 2.7568888888888887e-05, "loss": 0.8089, "step": 6304 }, { 
"epoch": 50.44, "grad_norm": 21.8116512298584, "learning_rate": 2.756444444444445e-05, "loss": 0.9445, "step": 6305 }, { "epoch": 50.448, "grad_norm": 15.87878131866455, "learning_rate": 2.7560000000000004e-05, "loss": 1.1027, "step": 6306 }, { "epoch": 50.456, "grad_norm": 21.684345245361328, "learning_rate": 2.7555555555555555e-05, "loss": 1.0694, "step": 6307 }, { "epoch": 50.464, "grad_norm": 19.85137367248535, "learning_rate": 2.755111111111111e-05, "loss": 0.9138, "step": 6308 }, { "epoch": 50.472, "grad_norm": 25.072908401489258, "learning_rate": 2.7546666666666672e-05, "loss": 0.7529, "step": 6309 }, { "epoch": 50.48, "grad_norm": 60.59017562866211, "learning_rate": 2.7542222222222223e-05, "loss": 0.7265, "step": 6310 }, { "epoch": 50.488, "grad_norm": 28.851587295532227, "learning_rate": 2.7537777777777778e-05, "loss": 0.9775, "step": 6311 }, { "epoch": 50.496, "grad_norm": 22.576719284057617, "learning_rate": 2.7533333333333333e-05, "loss": 0.9834, "step": 6312 }, { "epoch": 50.504, "grad_norm": 48.35557174682617, "learning_rate": 2.7528888888888894e-05, "loss": 1.0163, "step": 6313 }, { "epoch": 50.512, "grad_norm": 24.774404525756836, "learning_rate": 2.7524444444444446e-05, "loss": 1.3383, "step": 6314 }, { "epoch": 50.52, "grad_norm": 61.91995620727539, "learning_rate": 2.752e-05, "loss": 0.8023, "step": 6315 }, { "epoch": 50.528, "grad_norm": 50.401336669921875, "learning_rate": 2.7515555555555556e-05, "loss": 1.0398, "step": 6316 }, { "epoch": 50.536, "grad_norm": 15.968892097473145, "learning_rate": 2.751111111111111e-05, "loss": 0.9482, "step": 6317 }, { "epoch": 50.544, "grad_norm": 21.0560302734375, "learning_rate": 2.750666666666667e-05, "loss": 1.1106, "step": 6318 }, { "epoch": 50.552, "grad_norm": 21.583921432495117, "learning_rate": 2.7502222222222224e-05, "loss": 0.8624, "step": 6319 }, { "epoch": 50.56, "grad_norm": 16.221641540527344, "learning_rate": 2.749777777777778e-05, "loss": 0.905, "step": 6320 }, { "epoch": 50.568, "grad_norm": 
14.521453857421875, "learning_rate": 2.7493333333333333e-05, "loss": 0.7317, "step": 6321 }, { "epoch": 50.576, "grad_norm": 21.255054473876953, "learning_rate": 2.748888888888889e-05, "loss": 1.1086, "step": 6322 }, { "epoch": 50.584, "grad_norm": 34.82148742675781, "learning_rate": 2.7484444444444446e-05, "loss": 1.0817, "step": 6323 }, { "epoch": 50.592, "grad_norm": 25.48206329345703, "learning_rate": 2.748e-05, "loss": 1.1462, "step": 6324 }, { "epoch": 50.6, "grad_norm": 27.241214752197266, "learning_rate": 2.7475555555555556e-05, "loss": 1.2101, "step": 6325 }, { "epoch": 50.608, "grad_norm": 37.2017822265625, "learning_rate": 2.7471111111111114e-05, "loss": 1.2005, "step": 6326 }, { "epoch": 50.616, "grad_norm": 43.258056640625, "learning_rate": 2.746666666666667e-05, "loss": 1.0631, "step": 6327 }, { "epoch": 50.624, "grad_norm": 13.606701850891113, "learning_rate": 2.7462222222222224e-05, "loss": 0.8001, "step": 6328 }, { "epoch": 50.632, "grad_norm": 19.169343948364258, "learning_rate": 2.7457777777777775e-05, "loss": 0.8934, "step": 6329 }, { "epoch": 50.64, "grad_norm": 32.5294075012207, "learning_rate": 2.7453333333333337e-05, "loss": 0.8964, "step": 6330 }, { "epoch": 50.648, "grad_norm": 43.345619201660156, "learning_rate": 2.7448888888888892e-05, "loss": 1.0147, "step": 6331 }, { "epoch": 50.656, "grad_norm": 24.904048919677734, "learning_rate": 2.7444444444444443e-05, "loss": 0.9576, "step": 6332 }, { "epoch": 50.664, "grad_norm": 55.560340881347656, "learning_rate": 2.7439999999999998e-05, "loss": 1.468, "step": 6333 }, { "epoch": 50.672, "grad_norm": 43.311912536621094, "learning_rate": 2.743555555555556e-05, "loss": 0.9429, "step": 6334 }, { "epoch": 50.68, "grad_norm": 41.415218353271484, "learning_rate": 2.7431111111111115e-05, "loss": 1.3097, "step": 6335 }, { "epoch": 50.688, "grad_norm": 25.474929809570312, "learning_rate": 2.7426666666666666e-05, "loss": 1.0884, "step": 6336 }, { "epoch": 50.696, "grad_norm": 32.947994232177734, 
"learning_rate": 2.742222222222222e-05, "loss": 1.1959, "step": 6337 }, { "epoch": 50.704, "grad_norm": 38.22211456298828, "learning_rate": 2.7417777777777783e-05, "loss": 1.3493, "step": 6338 }, { "epoch": 50.712, "grad_norm": 36.79585647583008, "learning_rate": 2.7413333333333334e-05, "loss": 1.4784, "step": 6339 }, { "epoch": 50.72, "grad_norm": 31.829130172729492, "learning_rate": 2.740888888888889e-05, "loss": 2.7311, "step": 6340 }, { "epoch": 50.728, "grad_norm": 32.05205535888672, "learning_rate": 2.7404444444444444e-05, "loss": 2.6734, "step": 6341 }, { "epoch": 50.736, "grad_norm": 26.09864044189453, "learning_rate": 2.7400000000000002e-05, "loss": 1.3678, "step": 6342 }, { "epoch": 50.744, "grad_norm": 19.157737731933594, "learning_rate": 2.7395555555555557e-05, "loss": 0.645, "step": 6343 }, { "epoch": 50.752, "grad_norm": 30.090883255004883, "learning_rate": 2.739111111111111e-05, "loss": 1.0901, "step": 6344 }, { "epoch": 50.76, "grad_norm": 25.700220108032227, "learning_rate": 2.7386666666666666e-05, "loss": 1.0776, "step": 6345 }, { "epoch": 50.768, "grad_norm": 48.589996337890625, "learning_rate": 2.7382222222222225e-05, "loss": 1.0106, "step": 6346 }, { "epoch": 50.776, "grad_norm": 18.376367568969727, "learning_rate": 2.737777777777778e-05, "loss": 0.8256, "step": 6347 }, { "epoch": 50.784, "grad_norm": 20.85787582397461, "learning_rate": 2.7373333333333334e-05, "loss": 0.9202, "step": 6348 }, { "epoch": 50.792, "grad_norm": 27.7424259185791, "learning_rate": 2.736888888888889e-05, "loss": 1.1634, "step": 6349 }, { "epoch": 50.8, "grad_norm": 48.70969009399414, "learning_rate": 2.7364444444444447e-05, "loss": 0.82, "step": 6350 }, { "epoch": 50.808, "grad_norm": 25.441659927368164, "learning_rate": 2.7360000000000002e-05, "loss": 0.7025, "step": 6351 }, { "epoch": 50.816, "grad_norm": 19.283517837524414, "learning_rate": 2.7355555555555557e-05, "loss": 0.7915, "step": 6352 }, { "epoch": 50.824, "grad_norm": 16.933530807495117, "learning_rate": 
2.7351111111111112e-05, "loss": 0.7983, "step": 6353 }, { "epoch": 50.832, "grad_norm": 24.05413818359375, "learning_rate": 2.734666666666667e-05, "loss": 1.1608, "step": 6354 }, { "epoch": 50.84, "grad_norm": 19.024272918701172, "learning_rate": 2.7342222222222225e-05, "loss": 1.3931, "step": 6355 }, { "epoch": 50.848, "grad_norm": 16.039003372192383, "learning_rate": 2.733777777777778e-05, "loss": 1.1031, "step": 6356 }, { "epoch": 50.856, "grad_norm": 25.50886344909668, "learning_rate": 2.733333333333333e-05, "loss": 1.1382, "step": 6357 }, { "epoch": 50.864, "grad_norm": 42.30284881591797, "learning_rate": 2.7328888888888893e-05, "loss": 1.227, "step": 6358 }, { "epoch": 50.872, "grad_norm": 70.09373474121094, "learning_rate": 2.7324444444444448e-05, "loss": 1.4993, "step": 6359 }, { "epoch": 50.88, "grad_norm": 27.155258178710938, "learning_rate": 2.7320000000000003e-05, "loss": 1.1114, "step": 6360 }, { "epoch": 50.888, "grad_norm": 25.948734283447266, "learning_rate": 2.7315555555555554e-05, "loss": 1.655, "step": 6361 }, { "epoch": 50.896, "grad_norm": 35.97212600708008, "learning_rate": 2.7311111111111116e-05, "loss": 0.6396, "step": 6362 }, { "epoch": 50.904, "grad_norm": 43.98115158081055, "learning_rate": 2.730666666666667e-05, "loss": 1.0226, "step": 6363 }, { "epoch": 50.912, "grad_norm": 48.009315490722656, "learning_rate": 2.7302222222222222e-05, "loss": 0.6681, "step": 6364 }, { "epoch": 50.92, "grad_norm": 30.944047927856445, "learning_rate": 2.7297777777777777e-05, "loss": 0.981, "step": 6365 }, { "epoch": 50.928, "grad_norm": 29.956937789916992, "learning_rate": 2.7293333333333332e-05, "loss": 0.9375, "step": 6366 }, { "epoch": 50.936, "grad_norm": 37.60213851928711, "learning_rate": 2.728888888888889e-05, "loss": 0.9613, "step": 6367 }, { "epoch": 50.944, "grad_norm": 18.789180755615234, "learning_rate": 2.7284444444444445e-05, "loss": 0.9951, "step": 6368 }, { "epoch": 50.952, "grad_norm": 23.7203369140625, "learning_rate": 2.728e-05, "loss": 
1.0872, "step": 6369 }, { "epoch": 50.96, "grad_norm": 25.88899040222168, "learning_rate": 2.7275555555555555e-05, "loss": 0.8032, "step": 6370 }, { "epoch": 50.968, "grad_norm": 28.110885620117188, "learning_rate": 2.7271111111111113e-05, "loss": 1.1163, "step": 6371 }, { "epoch": 50.976, "grad_norm": 34.71344757080078, "learning_rate": 2.7266666666666668e-05, "loss": 0.876, "step": 6372 }, { "epoch": 50.984, "grad_norm": 17.00787353515625, "learning_rate": 2.7262222222222222e-05, "loss": 1.1512, "step": 6373 }, { "epoch": 50.992, "grad_norm": 26.941051483154297, "learning_rate": 2.7257777777777777e-05, "loss": 1.0336, "step": 6374 }, { "epoch": 51.0, "grad_norm": 23.060462951660156, "learning_rate": 2.7253333333333336e-05, "loss": 1.0617, "step": 6375 }, { "epoch": 51.0, "eval_loss": 1.1056114435195923, "eval_map": 0.3867, "eval_map_50": 0.7481, "eval_map_75": 0.3435, "eval_map_Coverall": 0.6023, "eval_map_Face_Shield": 0.4358, "eval_map_Gloves": 0.3194, "eval_map_Goggles": 0.2004, "eval_map_Mask": 0.3759, "eval_map_large": 0.5765, "eval_map_medium": 0.2561, "eval_map_small": 0.2898, "eval_mar_1": 0.3046, "eval_mar_10": 0.5191, "eval_mar_100": 0.5344, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.6176, "eval_mar_100_Gloves": 0.4344, "eval_mar_100_Goggles": 0.4281, "eval_mar_100_Mask": 0.4673, "eval_mar_large": 0.6918, "eval_mar_medium": 0.3971, "eval_mar_small": 0.3431, "eval_runtime": 0.9436, "eval_samples_per_second": 30.732, "eval_steps_per_second": 2.119, "step": 6375 }, { "epoch": 51.008, "grad_norm": 47.9997444152832, "learning_rate": 2.724888888888889e-05, "loss": 0.8148, "step": 6376 }, { "epoch": 51.016, "grad_norm": 44.22513961791992, "learning_rate": 2.7244444444444445e-05, "loss": 1.0074, "step": 6377 }, { "epoch": 51.024, "grad_norm": 19.29553985595703, "learning_rate": 2.724e-05, "loss": 0.9152, "step": 6378 }, { "epoch": 51.032, "grad_norm": 17.849210739135742, "learning_rate": 2.723555555555556e-05, "loss": 0.9862, "step": 6379 }, 
{ "epoch": 51.04, "grad_norm": 22.985811233520508, "learning_rate": 2.7231111111111113e-05, "loss": 1.416, "step": 6380 }, { "epoch": 51.048, "grad_norm": 22.83580207824707, "learning_rate": 2.7226666666666668e-05, "loss": 1.1568, "step": 6381 }, { "epoch": 51.056, "grad_norm": 31.129262924194336, "learning_rate": 2.7222222222222223e-05, "loss": 1.1257, "step": 6382 }, { "epoch": 51.064, "grad_norm": 40.482906341552734, "learning_rate": 2.721777777777778e-05, "loss": 0.9875, "step": 6383 }, { "epoch": 51.072, "grad_norm": 29.288227081298828, "learning_rate": 2.7213333333333336e-05, "loss": 1.1257, "step": 6384 }, { "epoch": 51.08, "grad_norm": 42.996543884277344, "learning_rate": 2.720888888888889e-05, "loss": 1.1686, "step": 6385 }, { "epoch": 51.088, "grad_norm": 150.55380249023438, "learning_rate": 2.7204444444444442e-05, "loss": 0.9854, "step": 6386 }, { "epoch": 51.096, "grad_norm": 25.53453826904297, "learning_rate": 2.7200000000000004e-05, "loss": 1.2459, "step": 6387 }, { "epoch": 51.104, "grad_norm": 18.693387985229492, "learning_rate": 2.719555555555556e-05, "loss": 1.263, "step": 6388 }, { "epoch": 51.112, "grad_norm": 31.560291290283203, "learning_rate": 2.719111111111111e-05, "loss": 0.8942, "step": 6389 }, { "epoch": 51.12, "grad_norm": 50.795223236083984, "learning_rate": 2.7186666666666665e-05, "loss": 0.9814, "step": 6390 }, { "epoch": 51.128, "grad_norm": 29.12008285522461, "learning_rate": 2.7182222222222227e-05, "loss": 1.0223, "step": 6391 }, { "epoch": 51.136, "grad_norm": 21.42455291748047, "learning_rate": 2.717777777777778e-05, "loss": 1.0295, "step": 6392 }, { "epoch": 51.144, "grad_norm": 55.88496017456055, "learning_rate": 2.7173333333333333e-05, "loss": 0.9766, "step": 6393 }, { "epoch": 51.152, "grad_norm": 37.14048385620117, "learning_rate": 2.7168888888888888e-05, "loss": 0.9859, "step": 6394 }, { "epoch": 51.16, "grad_norm": 20.112342834472656, "learning_rate": 2.716444444444445e-05, "loss": 1.3664, "step": 6395 }, { "epoch": 
51.168, "grad_norm": 23.809141159057617, "learning_rate": 2.716e-05, "loss": 1.137, "step": 6396 }, { "epoch": 51.176, "grad_norm": 25.1406192779541, "learning_rate": 2.7155555555555556e-05, "loss": 1.0355, "step": 6397 }, { "epoch": 51.184, "grad_norm": 21.24336814880371, "learning_rate": 2.715111111111111e-05, "loss": 0.7113, "step": 6398 }, { "epoch": 51.192, "grad_norm": 40.29545593261719, "learning_rate": 2.714666666666667e-05, "loss": 1.0548, "step": 6399 }, { "epoch": 51.2, "grad_norm": 16.810346603393555, "learning_rate": 2.7142222222222224e-05, "loss": 0.7987, "step": 6400 }, { "epoch": 51.208, "grad_norm": 35.80803680419922, "learning_rate": 2.713777777777778e-05, "loss": 1.1168, "step": 6401 }, { "epoch": 51.216, "grad_norm": 40.929691314697266, "learning_rate": 2.7133333333333333e-05, "loss": 1.0839, "step": 6402 }, { "epoch": 51.224, "grad_norm": 24.128326416015625, "learning_rate": 2.712888888888889e-05, "loss": 1.8309, "step": 6403 }, { "epoch": 51.232, "grad_norm": 23.547576904296875, "learning_rate": 2.7124444444444446e-05, "loss": 0.949, "step": 6404 }, { "epoch": 51.24, "grad_norm": 25.213191986083984, "learning_rate": 2.712e-05, "loss": 1.2866, "step": 6405 }, { "epoch": 51.248, "grad_norm": 29.099760055541992, "learning_rate": 2.7115555555555556e-05, "loss": 0.9981, "step": 6406 }, { "epoch": 51.256, "grad_norm": 23.193660736083984, "learning_rate": 2.7111111111111114e-05, "loss": 0.9829, "step": 6407 }, { "epoch": 51.264, "grad_norm": 17.854127883911133, "learning_rate": 2.710666666666667e-05, "loss": 1.0138, "step": 6408 }, { "epoch": 51.272, "grad_norm": 50.639591217041016, "learning_rate": 2.7102222222222224e-05, "loss": 2.0505, "step": 6409 }, { "epoch": 51.28, "grad_norm": 28.589805603027344, "learning_rate": 2.709777777777778e-05, "loss": 0.8864, "step": 6410 }, { "epoch": 51.288, "grad_norm": 15.556669235229492, "learning_rate": 2.7093333333333337e-05, "loss": 1.8276, "step": 6411 }, { "epoch": 51.296, "grad_norm": 26.900014877319336, 
"learning_rate": 2.7088888888888892e-05, "loss": 1.3214, "step": 6412 }, { "epoch": 51.304, "grad_norm": 35.60749816894531, "learning_rate": 2.7084444444444447e-05, "loss": 1.7299, "step": 6413 }, { "epoch": 51.312, "grad_norm": 48.2669563293457, "learning_rate": 2.7079999999999998e-05, "loss": 1.1045, "step": 6414 }, { "epoch": 51.32, "grad_norm": 35.663238525390625, "learning_rate": 2.707555555555556e-05, "loss": 1.564, "step": 6415 }, { "epoch": 51.328, "grad_norm": 43.01108932495117, "learning_rate": 2.7071111111111115e-05, "loss": 1.8848, "step": 6416 }, { "epoch": 51.336, "grad_norm": 14.976210594177246, "learning_rate": 2.706666666666667e-05, "loss": 1.2778, "step": 6417 }, { "epoch": 51.344, "grad_norm": 27.6301212310791, "learning_rate": 2.706222222222222e-05, "loss": 1.0529, "step": 6418 }, { "epoch": 51.352, "grad_norm": 21.302743911743164, "learning_rate": 2.7057777777777776e-05, "loss": 0.7891, "step": 6419 }, { "epoch": 51.36, "grad_norm": 19.58111000061035, "learning_rate": 2.7053333333333337e-05, "loss": 1.0988, "step": 6420 }, { "epoch": 51.368, "grad_norm": 17.660888671875, "learning_rate": 2.704888888888889e-05, "loss": 1.1218, "step": 6421 }, { "epoch": 51.376, "grad_norm": 25.837989807128906, "learning_rate": 2.7044444444444444e-05, "loss": 1.1248, "step": 6422 }, { "epoch": 51.384, "grad_norm": 24.993167877197266, "learning_rate": 2.704e-05, "loss": 1.1418, "step": 6423 }, { "epoch": 51.392, "grad_norm": 19.76505470275879, "learning_rate": 2.7035555555555557e-05, "loss": 0.9333, "step": 6424 }, { "epoch": 51.4, "grad_norm": 26.46027946472168, "learning_rate": 2.703111111111111e-05, "loss": 1.0012, "step": 6425 }, { "epoch": 51.408, "grad_norm": 16.065263748168945, "learning_rate": 2.7026666666666667e-05, "loss": 1.0602, "step": 6426 }, { "epoch": 51.416, "grad_norm": 21.094547271728516, "learning_rate": 2.702222222222222e-05, "loss": 0.9267, "step": 6427 }, { "epoch": 51.424, "grad_norm": 24.284198760986328, "learning_rate": 
2.701777777777778e-05, "loss": 1.0207, "step": 6428 }, { "epoch": 51.432, "grad_norm": 49.99892807006836, "learning_rate": 2.7013333333333334e-05, "loss": 0.9136, "step": 6429 }, { "epoch": 51.44, "grad_norm": 19.61775779724121, "learning_rate": 2.700888888888889e-05, "loss": 0.9568, "step": 6430 }, { "epoch": 51.448, "grad_norm": 29.270015716552734, "learning_rate": 2.7004444444444444e-05, "loss": 0.8764, "step": 6431 }, { "epoch": 51.456, "grad_norm": 25.807493209838867, "learning_rate": 2.7000000000000002e-05, "loss": 0.8687, "step": 6432 }, { "epoch": 51.464, "grad_norm": 31.81413459777832, "learning_rate": 2.6995555555555557e-05, "loss": 0.6501, "step": 6433 }, { "epoch": 51.472, "grad_norm": 36.15186309814453, "learning_rate": 2.6991111111111112e-05, "loss": 1.1218, "step": 6434 }, { "epoch": 51.48, "grad_norm": 33.256378173828125, "learning_rate": 2.6986666666666667e-05, "loss": 1.008, "step": 6435 }, { "epoch": 51.488, "grad_norm": 20.1466121673584, "learning_rate": 2.6982222222222225e-05, "loss": 1.2134, "step": 6436 }, { "epoch": 51.496, "grad_norm": 20.188478469848633, "learning_rate": 2.697777777777778e-05, "loss": 0.7904, "step": 6437 }, { "epoch": 51.504, "grad_norm": 60.86931228637695, "learning_rate": 2.6973333333333335e-05, "loss": 0.9698, "step": 6438 }, { "epoch": 51.512, "grad_norm": 21.422733306884766, "learning_rate": 2.6968888888888886e-05, "loss": 0.7197, "step": 6439 }, { "epoch": 51.52, "grad_norm": 22.489477157592773, "learning_rate": 2.6964444444444448e-05, "loss": 0.8842, "step": 6440 }, { "epoch": 51.528, "grad_norm": 39.93143081665039, "learning_rate": 2.6960000000000003e-05, "loss": 0.9935, "step": 6441 }, { "epoch": 51.536, "grad_norm": 37.69254684448242, "learning_rate": 2.6955555555555558e-05, "loss": 0.8488, "step": 6442 }, { "epoch": 51.544, "grad_norm": 23.889432907104492, "learning_rate": 2.695111111111111e-05, "loss": 0.7813, "step": 6443 }, { "epoch": 51.552, "grad_norm": 22.4487361907959, "learning_rate": 
2.694666666666667e-05, "loss": 1.0823, "step": 6444 }, { "epoch": 51.56, "grad_norm": 77.10050201416016, "learning_rate": 2.6942222222222226e-05, "loss": 1.1513, "step": 6445 }, { "epoch": 51.568, "grad_norm": 43.20615005493164, "learning_rate": 2.6937777777777777e-05, "loss": 1.1603, "step": 6446 }, { "epoch": 51.576, "grad_norm": 161.40013122558594, "learning_rate": 2.6933333333333332e-05, "loss": 0.9462, "step": 6447 }, { "epoch": 51.584, "grad_norm": 45.880889892578125, "learning_rate": 2.6928888888888893e-05, "loss": 0.886, "step": 6448 }, { "epoch": 51.592, "grad_norm": 40.791015625, "learning_rate": 2.6924444444444445e-05, "loss": 0.8305, "step": 6449 }, { "epoch": 51.6, "grad_norm": 18.543115615844727, "learning_rate": 2.692e-05, "loss": 0.9813, "step": 6450 }, { "epoch": 51.608, "grad_norm": 72.2219009399414, "learning_rate": 2.6915555555555555e-05, "loss": 1.061, "step": 6451 }, { "epoch": 51.616, "grad_norm": 19.097043991088867, "learning_rate": 2.6911111111111116e-05, "loss": 0.7578, "step": 6452 }, { "epoch": 51.624, "grad_norm": 38.7542724609375, "learning_rate": 2.6906666666666668e-05, "loss": 1.0279, "step": 6453 }, { "epoch": 51.632, "grad_norm": 33.09834289550781, "learning_rate": 2.6902222222222223e-05, "loss": 1.2038, "step": 6454 }, { "epoch": 51.64, "grad_norm": 25.637004852294922, "learning_rate": 2.6897777777777777e-05, "loss": 1.1153, "step": 6455 }, { "epoch": 51.648, "grad_norm": 34.21079635620117, "learning_rate": 2.6893333333333336e-05, "loss": 0.8178, "step": 6456 }, { "epoch": 51.656, "grad_norm": 12.682636260986328, "learning_rate": 2.688888888888889e-05, "loss": 0.8357, "step": 6457 }, { "epoch": 51.664, "grad_norm": 27.707761764526367, "learning_rate": 2.6884444444444445e-05, "loss": 0.951, "step": 6458 }, { "epoch": 51.672, "grad_norm": 34.01332092285156, "learning_rate": 2.688e-05, "loss": 1.0305, "step": 6459 }, { "epoch": 51.68, "grad_norm": 32.357791900634766, "learning_rate": 2.687555555555556e-05, "loss": 1.1384, "step": 
6460 }, { "epoch": 51.688, "grad_norm": 27.50533676147461, "learning_rate": 2.6871111111111113e-05, "loss": 1.0838, "step": 6461 }, { "epoch": 51.696, "grad_norm": 29.395212173461914, "learning_rate": 2.6866666666666668e-05, "loss": 0.989, "step": 6462 }, { "epoch": 51.704, "grad_norm": 83.30741882324219, "learning_rate": 2.6862222222222223e-05, "loss": 1.1469, "step": 6463 }, { "epoch": 51.712, "grad_norm": 15.770444869995117, "learning_rate": 2.685777777777778e-05, "loss": 0.7059, "step": 6464 }, { "epoch": 51.72, "grad_norm": 22.40885353088379, "learning_rate": 2.6853333333333336e-05, "loss": 0.8296, "step": 6465 }, { "epoch": 51.728, "grad_norm": 45.31441879272461, "learning_rate": 2.684888888888889e-05, "loss": 0.8045, "step": 6466 }, { "epoch": 51.736, "grad_norm": 26.288331985473633, "learning_rate": 2.6844444444444446e-05, "loss": 1.6105, "step": 6467 }, { "epoch": 51.744, "grad_norm": 87.79457092285156, "learning_rate": 2.6840000000000004e-05, "loss": 0.984, "step": 6468 }, { "epoch": 51.752, "grad_norm": 26.062255859375, "learning_rate": 2.683555555555556e-05, "loss": 0.9335, "step": 6469 }, { "epoch": 51.76, "grad_norm": 43.36052703857422, "learning_rate": 2.6831111111111114e-05, "loss": 1.3243, "step": 6470 }, { "epoch": 51.768, "grad_norm": 64.29241180419922, "learning_rate": 2.6826666666666665e-05, "loss": 0.853, "step": 6471 }, { "epoch": 51.776, "grad_norm": 74.42399597167969, "learning_rate": 2.682222222222222e-05, "loss": 3.0827, "step": 6472 }, { "epoch": 51.784, "grad_norm": 78.67758178710938, "learning_rate": 2.681777777777778e-05, "loss": 0.9755, "step": 6473 }, { "epoch": 51.792, "grad_norm": 107.73612213134766, "learning_rate": 2.6813333333333336e-05, "loss": 1.0094, "step": 6474 }, { "epoch": 51.8, "grad_norm": 41.311038970947266, "learning_rate": 2.6808888888888888e-05, "loss": 0.9746, "step": 6475 }, { "epoch": 51.808, "grad_norm": 46.82920455932617, "learning_rate": 2.6804444444444443e-05, "loss": 0.9969, "step": 6476 }, { "epoch": 
51.816, "grad_norm": 19.438528060913086, "learning_rate": 2.6800000000000004e-05, "loss": 0.9938, "step": 6477 }, { "epoch": 51.824, "grad_norm": 25.658620834350586, "learning_rate": 2.6795555555555556e-05, "loss": 0.8514, "step": 6478 }, { "epoch": 51.832, "grad_norm": 39.912574768066406, "learning_rate": 2.679111111111111e-05, "loss": 1.2856, "step": 6479 }, { "epoch": 51.84, "grad_norm": 27.669233322143555, "learning_rate": 2.6786666666666665e-05, "loss": 0.873, "step": 6480 }, { "epoch": 51.848, "grad_norm": 56.08305358886719, "learning_rate": 2.6782222222222224e-05, "loss": 0.955, "step": 6481 }, { "epoch": 51.856, "grad_norm": 20.96050453186035, "learning_rate": 2.677777777777778e-05, "loss": 1.2719, "step": 6482 }, { "epoch": 51.864, "grad_norm": 25.826404571533203, "learning_rate": 2.6773333333333333e-05, "loss": 0.7233, "step": 6483 }, { "epoch": 51.872, "grad_norm": 18.35379409790039, "learning_rate": 2.6768888888888888e-05, "loss": 1.6202, "step": 6484 }, { "epoch": 51.88, "grad_norm": 14.534741401672363, "learning_rate": 2.6764444444444446e-05, "loss": 1.1605, "step": 6485 }, { "epoch": 51.888, "grad_norm": 18.822330474853516, "learning_rate": 2.676e-05, "loss": 1.0272, "step": 6486 }, { "epoch": 51.896, "grad_norm": 22.103961944580078, "learning_rate": 2.6755555555555556e-05, "loss": 0.7506, "step": 6487 }, { "epoch": 51.904, "grad_norm": 39.364933013916016, "learning_rate": 2.675111111111111e-05, "loss": 1.2747, "step": 6488 }, { "epoch": 51.912, "grad_norm": 22.901044845581055, "learning_rate": 2.674666666666667e-05, "loss": 1.0938, "step": 6489 }, { "epoch": 51.92, "grad_norm": 27.880523681640625, "learning_rate": 2.6742222222222224e-05, "loss": 0.7798, "step": 6490 }, { "epoch": 51.928, "grad_norm": 16.59009552001953, "learning_rate": 2.673777777777778e-05, "loss": 0.9486, "step": 6491 }, { "epoch": 51.936, "grad_norm": 25.0008602142334, "learning_rate": 2.6733333333333334e-05, "loss": 1.1605, "step": 6492 }, { "epoch": 51.944, "grad_norm": 
26.513778686523438, "learning_rate": 2.6728888888888892e-05, "loss": 0.8735, "step": 6493 }, { "epoch": 51.952, "grad_norm": 15.427002906799316, "learning_rate": 2.6724444444444447e-05, "loss": 1.3105, "step": 6494 }, { "epoch": 51.96, "grad_norm": 20.646991729736328, "learning_rate": 2.672e-05, "loss": 0.8511, "step": 6495 }, { "epoch": 51.968, "grad_norm": 30.712251663208008, "learning_rate": 2.6715555555555553e-05, "loss": 2.1482, "step": 6496 }, { "epoch": 51.976, "grad_norm": 31.354183197021484, "learning_rate": 2.6711111111111115e-05, "loss": 0.9071, "step": 6497 }, { "epoch": 51.984, "grad_norm": 36.05797576904297, "learning_rate": 2.670666666666667e-05, "loss": 0.8745, "step": 6498 }, { "epoch": 51.992, "grad_norm": 21.028705596923828, "learning_rate": 2.6702222222222224e-05, "loss": 0.8526, "step": 6499 }, { "epoch": 52.0, "grad_norm": 36.91780471801758, "learning_rate": 2.6697777777777776e-05, "loss": 1.1671, "step": 6500 }, { "epoch": 52.0, "eval_loss": 1.1407557725906372, "eval_map": 0.3708, "eval_map_50": 0.7141, "eval_map_75": 0.3301, "eval_map_Coverall": 0.621, "eval_map_Face_Shield": 0.4325, "eval_map_Gloves": 0.2918, "eval_map_Goggles": 0.1575, "eval_map_Mask": 0.3512, "eval_map_large": 0.6183, "eval_map_medium": 0.2448, "eval_map_small": 0.3055, "eval_mar_1": 0.2884, "eval_mar_10": 0.5334, "eval_mar_100": 0.5511, "eval_mar_100_Coverall": 0.76, "eval_mar_100_Face_Shield": 0.6941, "eval_mar_100_Gloves": 0.4098, "eval_mar_100_Goggles": 0.4375, "eval_mar_100_Mask": 0.4538, "eval_mar_large": 0.7362, "eval_mar_medium": 0.4174, "eval_mar_small": 0.3611, "eval_runtime": 0.9354, "eval_samples_per_second": 31.002, "eval_steps_per_second": 2.138, "step": 6500 }, { "epoch": 52.008, "grad_norm": 38.870418548583984, "learning_rate": 2.6693333333333338e-05, "loss": 2.1787, "step": 6501 }, { "epoch": 52.016, "grad_norm": 38.68275833129883, "learning_rate": 2.6688888888888892e-05, "loss": 1.4321, "step": 6502 }, { "epoch": 52.024, "grad_norm": 25.64080047607422, 
"learning_rate": 2.6684444444444444e-05, "loss": 0.8804, "step": 6503 }, { "epoch": 52.032, "grad_norm": 26.487974166870117, "learning_rate": 2.668e-05, "loss": 0.9484, "step": 6504 }, { "epoch": 52.04, "grad_norm": 23.67596435546875, "learning_rate": 2.667555555555556e-05, "loss": 0.7064, "step": 6505 }, { "epoch": 52.048, "grad_norm": 17.33643913269043, "learning_rate": 2.6671111111111112e-05, "loss": 0.9058, "step": 6506 }, { "epoch": 52.056, "grad_norm": 16.951730728149414, "learning_rate": 2.6666666666666667e-05, "loss": 1.3086, "step": 6507 }, { "epoch": 52.064, "grad_norm": 100.9316635131836, "learning_rate": 2.666222222222222e-05, "loss": 1.0942, "step": 6508 }, { "epoch": 52.072, "grad_norm": 22.394977569580078, "learning_rate": 2.6657777777777783e-05, "loss": 0.6156, "step": 6509 }, { "epoch": 52.08, "grad_norm": 16.12751579284668, "learning_rate": 2.6653333333333335e-05, "loss": 0.8051, "step": 6510 }, { "epoch": 52.088, "grad_norm": 36.25572967529297, "learning_rate": 2.664888888888889e-05, "loss": 1.8912, "step": 6511 }, { "epoch": 52.096, "grad_norm": 39.32273864746094, "learning_rate": 2.6644444444444444e-05, "loss": 0.674, "step": 6512 }, { "epoch": 52.104, "grad_norm": 46.173343658447266, "learning_rate": 2.6640000000000002e-05, "loss": 0.7532, "step": 6513 }, { "epoch": 52.112, "grad_norm": 23.957889556884766, "learning_rate": 2.6635555555555557e-05, "loss": 1.2369, "step": 6514 }, { "epoch": 52.12, "grad_norm": 20.07785987854004, "learning_rate": 2.6631111111111112e-05, "loss": 1.0996, "step": 6515 }, { "epoch": 52.128, "grad_norm": 20.75322151184082, "learning_rate": 2.6626666666666667e-05, "loss": 0.8237, "step": 6516 }, { "epoch": 52.136, "grad_norm": 23.455442428588867, "learning_rate": 2.6622222222222225e-05, "loss": 0.7557, "step": 6517 }, { "epoch": 52.144, "grad_norm": 25.84341049194336, "learning_rate": 2.661777777777778e-05, "loss": 1.4873, "step": 6518 }, { "epoch": 52.152, "grad_norm": 26.120262145996094, "learning_rate": 
2.6613333333333335e-05, "loss": 0.9049, "step": 6519 }, { "epoch": 52.16, "grad_norm": 14.49306583404541, "learning_rate": 2.660888888888889e-05, "loss": 1.2054, "step": 6520 }, { "epoch": 52.168, "grad_norm": 341.4832763671875, "learning_rate": 2.6604444444444445e-05, "loss": 0.8685, "step": 6521 }, { "epoch": 52.176, "grad_norm": 33.55988693237305, "learning_rate": 2.6600000000000003e-05, "loss": 1.1763, "step": 6522 }, { "epoch": 52.184, "grad_norm": 23.77373695373535, "learning_rate": 2.6595555555555558e-05, "loss": 0.8332, "step": 6523 }, { "epoch": 52.192, "grad_norm": 55.76877212524414, "learning_rate": 2.6591111111111113e-05, "loss": 0.9147, "step": 6524 }, { "epoch": 52.2, "grad_norm": 23.315885543823242, "learning_rate": 2.6586666666666664e-05, "loss": 1.0185, "step": 6525 }, { "epoch": 52.208, "grad_norm": 26.214195251464844, "learning_rate": 2.6582222222222226e-05, "loss": 0.9321, "step": 6526 }, { "epoch": 52.216, "grad_norm": 26.74506378173828, "learning_rate": 2.657777777777778e-05, "loss": 0.9908, "step": 6527 }, { "epoch": 52.224, "grad_norm": 16.08954620361328, "learning_rate": 2.6573333333333332e-05, "loss": 0.8588, "step": 6528 }, { "epoch": 52.232, "grad_norm": 31.933256149291992, "learning_rate": 2.6568888888888887e-05, "loss": 0.8342, "step": 6529 }, { "epoch": 52.24, "grad_norm": 43.48454284667969, "learning_rate": 2.656444444444445e-05, "loss": 1.2917, "step": 6530 }, { "epoch": 52.248, "grad_norm": 19.404911041259766, "learning_rate": 2.6560000000000003e-05, "loss": 1.0278, "step": 6531 }, { "epoch": 52.256, "grad_norm": 26.531694412231445, "learning_rate": 2.6555555555555555e-05, "loss": 0.9565, "step": 6532 }, { "epoch": 52.264, "grad_norm": 37.60585403442383, "learning_rate": 2.655111111111111e-05, "loss": 0.9299, "step": 6533 }, { "epoch": 52.272, "grad_norm": 47.100032806396484, "learning_rate": 2.654666666666667e-05, "loss": 1.5935, "step": 6534 }, { "epoch": 52.28, "grad_norm": 40.350257873535156, "learning_rate": 
2.6542222222222223e-05, "loss": 1.0757, "step": 6535 }, { "epoch": 52.288, "grad_norm": 26.318723678588867, "learning_rate": 2.6537777777777777e-05, "loss": 1.2149, "step": 6536 }, { "epoch": 52.296, "grad_norm": 50.25027847290039, "learning_rate": 2.6533333333333332e-05, "loss": 1.2658, "step": 6537 }, { "epoch": 52.304, "grad_norm": 25.2166805267334, "learning_rate": 2.652888888888889e-05, "loss": 1.0637, "step": 6538 }, { "epoch": 52.312, "grad_norm": 38.808387756347656, "learning_rate": 2.6524444444444445e-05, "loss": 0.8122, "step": 6539 }, { "epoch": 52.32, "grad_norm": 17.975830078125, "learning_rate": 2.652e-05, "loss": 1.1385, "step": 6540 }, { "epoch": 52.328, "grad_norm": 24.076501846313477, "learning_rate": 2.6515555555555555e-05, "loss": 0.9982, "step": 6541 }, { "epoch": 52.336, "grad_norm": 59.32272720336914, "learning_rate": 2.6511111111111113e-05, "loss": 1.2424, "step": 6542 }, { "epoch": 52.344, "grad_norm": 24.932586669921875, "learning_rate": 2.6506666666666668e-05, "loss": 0.8892, "step": 6543 }, { "epoch": 52.352, "grad_norm": 26.76793098449707, "learning_rate": 2.6502222222222223e-05, "loss": 0.8296, "step": 6544 }, { "epoch": 52.36, "grad_norm": 32.31773376464844, "learning_rate": 2.6497777777777778e-05, "loss": 0.7083, "step": 6545 }, { "epoch": 52.368, "grad_norm": 22.89522933959961, "learning_rate": 2.6493333333333336e-05, "loss": 1.1692, "step": 6546 }, { "epoch": 52.376, "grad_norm": 30.323484420776367, "learning_rate": 2.648888888888889e-05, "loss": 0.7569, "step": 6547 }, { "epoch": 52.384, "grad_norm": 20.797874450683594, "learning_rate": 2.6484444444444446e-05, "loss": 0.9184, "step": 6548 }, { "epoch": 52.392, "grad_norm": 30.43157196044922, "learning_rate": 2.648e-05, "loss": 1.3677, "step": 6549 }, { "epoch": 52.4, "grad_norm": 28.192262649536133, "learning_rate": 2.647555555555556e-05, "loss": 0.8744, "step": 6550 }, { "epoch": 52.408, "grad_norm": 22.67184066772461, "learning_rate": 2.6471111111111114e-05, "loss": 0.9021, 
"step": 6551 }, { "epoch": 52.416, "grad_norm": 39.97927474975586, "learning_rate": 2.646666666666667e-05, "loss": 1.9252, "step": 6552 }, { "epoch": 52.424, "grad_norm": 18.170434951782227, "learning_rate": 2.646222222222222e-05, "loss": 0.7101, "step": 6553 }, { "epoch": 52.432, "grad_norm": 18.29554557800293, "learning_rate": 2.645777777777778e-05, "loss": 0.9502, "step": 6554 }, { "epoch": 52.44, "grad_norm": 26.97386360168457, "learning_rate": 2.6453333333333336e-05, "loss": 1.0905, "step": 6555 }, { "epoch": 52.448, "grad_norm": 27.364334106445312, "learning_rate": 2.644888888888889e-05, "loss": 1.3882, "step": 6556 }, { "epoch": 52.456, "grad_norm": 16.623964309692383, "learning_rate": 2.6444444444444443e-05, "loss": 0.8899, "step": 6557 }, { "epoch": 52.464, "grad_norm": 17.460224151611328, "learning_rate": 2.6440000000000004e-05, "loss": 0.7318, "step": 6558 }, { "epoch": 52.472, "grad_norm": 16.288562774658203, "learning_rate": 2.643555555555556e-05, "loss": 0.9505, "step": 6559 }, { "epoch": 52.48, "grad_norm": 43.87291717529297, "learning_rate": 2.643111111111111e-05, "loss": 0.7896, "step": 6560 }, { "epoch": 52.488, "grad_norm": 19.8505916595459, "learning_rate": 2.6426666666666665e-05, "loss": 0.9888, "step": 6561 }, { "epoch": 52.496, "grad_norm": 19.204750061035156, "learning_rate": 2.6422222222222227e-05, "loss": 1.3151, "step": 6562 }, { "epoch": 52.504, "grad_norm": 33.10382080078125, "learning_rate": 2.641777777777778e-05, "loss": 1.1741, "step": 6563 }, { "epoch": 52.512, "grad_norm": 25.368389129638672, "learning_rate": 2.6413333333333333e-05, "loss": 1.2468, "step": 6564 }, { "epoch": 52.52, "grad_norm": 14.396736145019531, "learning_rate": 2.6408888888888888e-05, "loss": 1.1692, "step": 6565 }, { "epoch": 52.528, "grad_norm": 41.88602828979492, "learning_rate": 2.640444444444445e-05, "loss": 1.1953, "step": 6566 }, { "epoch": 52.536, "grad_norm": 28.419254302978516, "learning_rate": 2.64e-05, "loss": 0.7742, "step": 6567 }, { "epoch": 
52.544, "grad_norm": 41.308570861816406, "learning_rate": 2.6395555555555556e-05, "loss": 1.0369, "step": 6568 }, { "epoch": 52.552, "grad_norm": 23.454763412475586, "learning_rate": 2.639111111111111e-05, "loss": 0.8995, "step": 6569 }, { "epoch": 52.56, "grad_norm": 22.86697769165039, "learning_rate": 2.638666666666667e-05, "loss": 1.2901, "step": 6570 }, { "epoch": 52.568, "grad_norm": 26.695005416870117, "learning_rate": 2.6382222222222224e-05, "loss": 1.2704, "step": 6571 }, { "epoch": 52.576, "grad_norm": 35.93494415283203, "learning_rate": 2.637777777777778e-05, "loss": 1.3517, "step": 6572 }, { "epoch": 52.584, "grad_norm": 43.06911087036133, "learning_rate": 2.6373333333333334e-05, "loss": 0.871, "step": 6573 }, { "epoch": 52.592, "grad_norm": 26.29384422302246, "learning_rate": 2.636888888888889e-05, "loss": 0.9982, "step": 6574 }, { "epoch": 52.6, "grad_norm": 93.4670181274414, "learning_rate": 2.6364444444444447e-05, "loss": 1.1714, "step": 6575 }, { "epoch": 52.608, "grad_norm": 29.01388168334961, "learning_rate": 2.6360000000000002e-05, "loss": 0.8676, "step": 6576 }, { "epoch": 52.616, "grad_norm": 18.284008026123047, "learning_rate": 2.6355555555555557e-05, "loss": 1.2195, "step": 6577 }, { "epoch": 52.624, "grad_norm": 24.99852752685547, "learning_rate": 2.635111111111111e-05, "loss": 1.1915, "step": 6578 }, { "epoch": 52.632, "grad_norm": 41.257835388183594, "learning_rate": 2.634666666666667e-05, "loss": 1.1049, "step": 6579 }, { "epoch": 52.64, "grad_norm": 22.659282684326172, "learning_rate": 2.6342222222222224e-05, "loss": 1.5489, "step": 6580 }, { "epoch": 52.648, "grad_norm": 51.28184127807617, "learning_rate": 2.633777777777778e-05, "loss": 0.9621, "step": 6581 }, { "epoch": 52.656, "grad_norm": 30.754735946655273, "learning_rate": 2.633333333333333e-05, "loss": 1.1522, "step": 6582 }, { "epoch": 52.664, "grad_norm": 21.177913665771484, "learning_rate": 2.6328888888888892e-05, "loss": 1.1601, "step": 6583 }, { "epoch": 52.672, "grad_norm": 
18.751754760742188, "learning_rate": 2.6324444444444447e-05, "loss": 1.3563, "step": 6584 }, { "epoch": 52.68, "grad_norm": 15.425297737121582, "learning_rate": 2.632e-05, "loss": 1.3111, "step": 6585 }, { "epoch": 52.688, "grad_norm": 32.965606689453125, "learning_rate": 2.6315555555555554e-05, "loss": 1.1561, "step": 6586 }, { "epoch": 52.696, "grad_norm": 27.588430404663086, "learning_rate": 2.6311111111111115e-05, "loss": 0.8584, "step": 6587 }, { "epoch": 52.704, "grad_norm": 74.92459869384766, "learning_rate": 2.630666666666667e-05, "loss": 1.2614, "step": 6588 }, { "epoch": 52.712, "grad_norm": 24.151439666748047, "learning_rate": 2.630222222222222e-05, "loss": 1.0728, "step": 6589 }, { "epoch": 52.72, "grad_norm": 15.730101585388184, "learning_rate": 2.6297777777777776e-05, "loss": 1.0479, "step": 6590 }, { "epoch": 52.728, "grad_norm": 38.541473388671875, "learning_rate": 2.6293333333333338e-05, "loss": 0.9779, "step": 6591 }, { "epoch": 52.736, "grad_norm": 39.44069290161133, "learning_rate": 2.628888888888889e-05, "loss": 1.1423, "step": 6592 }, { "epoch": 52.744, "grad_norm": 34.184364318847656, "learning_rate": 2.6284444444444444e-05, "loss": 0.9013, "step": 6593 }, { "epoch": 52.752, "grad_norm": 29.974658966064453, "learning_rate": 2.628e-05, "loss": 1.027, "step": 6594 }, { "epoch": 52.76, "grad_norm": 19.737226486206055, "learning_rate": 2.6275555555555557e-05, "loss": 1.087, "step": 6595 }, { "epoch": 52.768, "grad_norm": 23.703542709350586, "learning_rate": 2.6271111111111112e-05, "loss": 0.7887, "step": 6596 }, { "epoch": 52.776, "grad_norm": 19.47688865661621, "learning_rate": 2.6266666666666667e-05, "loss": 0.7734, "step": 6597 }, { "epoch": 52.784, "grad_norm": 20.417173385620117, "learning_rate": 2.6262222222222222e-05, "loss": 0.7082, "step": 6598 }, { "epoch": 52.792, "grad_norm": 28.012773513793945, "learning_rate": 2.625777777777778e-05, "loss": 2.7171, "step": 6599 }, { "epoch": 52.8, "grad_norm": 18.49938201904297, "learning_rate": 
2.6253333333333335e-05, "loss": 1.5883, "step": 6600 }, { "epoch": 52.808, "grad_norm": 46.77493667602539, "learning_rate": 2.624888888888889e-05, "loss": 1.045, "step": 6601 }, { "epoch": 52.816, "grad_norm": 25.186941146850586, "learning_rate": 2.6244444444444445e-05, "loss": 1.1355, "step": 6602 }, { "epoch": 52.824, "grad_norm": 22.99761962890625, "learning_rate": 2.6240000000000003e-05, "loss": 0.7415, "step": 6603 }, { "epoch": 52.832, "grad_norm": 22.51603889465332, "learning_rate": 2.6235555555555558e-05, "loss": 1.1395, "step": 6604 }, { "epoch": 52.84, "grad_norm": 149.9355010986328, "learning_rate": 2.6231111111111113e-05, "loss": 0.9811, "step": 6605 }, { "epoch": 52.848, "grad_norm": 34.64366149902344, "learning_rate": 2.6226666666666667e-05, "loss": 1.2941, "step": 6606 }, { "epoch": 52.856, "grad_norm": 27.211040496826172, "learning_rate": 2.6222222222222226e-05, "loss": 1.103, "step": 6607 }, { "epoch": 52.864, "grad_norm": 52.942626953125, "learning_rate": 2.621777777777778e-05, "loss": 0.975, "step": 6608 }, { "epoch": 52.872, "grad_norm": 28.004484176635742, "learning_rate": 2.6213333333333335e-05, "loss": 1.5188, "step": 6609 }, { "epoch": 52.88, "grad_norm": 23.109664916992188, "learning_rate": 2.6208888888888887e-05, "loss": 0.7126, "step": 6610 }, { "epoch": 52.888, "grad_norm": 13.647992134094238, "learning_rate": 2.620444444444445e-05, "loss": 1.0076, "step": 6611 }, { "epoch": 52.896, "grad_norm": 24.112350463867188, "learning_rate": 2.6200000000000003e-05, "loss": 1.0547, "step": 6612 }, { "epoch": 52.904, "grad_norm": 48.1175537109375, "learning_rate": 2.6195555555555558e-05, "loss": 0.9826, "step": 6613 }, { "epoch": 52.912, "grad_norm": 13.478992462158203, "learning_rate": 2.619111111111111e-05, "loss": 0.9353, "step": 6614 }, { "epoch": 52.92, "grad_norm": 26.398283004760742, "learning_rate": 2.618666666666667e-05, "loss": 1.2155, "step": 6615 }, { "epoch": 52.928, "grad_norm": 35.7379150390625, "learning_rate": 
2.6182222222222226e-05, "loss": 0.833, "step": 6616 }, { "epoch": 52.936, "grad_norm": 23.84579086303711, "learning_rate": 2.6177777777777777e-05, "loss": 2.1447, "step": 6617 }, { "epoch": 52.944, "grad_norm": 23.138423919677734, "learning_rate": 2.6173333333333332e-05, "loss": 0.8519, "step": 6618 }, { "epoch": 52.952, "grad_norm": 22.01460838317871, "learning_rate": 2.6168888888888894e-05, "loss": 0.6691, "step": 6619 }, { "epoch": 52.96, "grad_norm": 27.385326385498047, "learning_rate": 2.6164444444444445e-05, "loss": 1.0689, "step": 6620 }, { "epoch": 52.968, "grad_norm": 14.600380897521973, "learning_rate": 2.616e-05, "loss": 0.8161, "step": 6621 }, { "epoch": 52.976, "grad_norm": 14.053169250488281, "learning_rate": 2.6155555555555555e-05, "loss": 0.7947, "step": 6622 }, { "epoch": 52.984, "grad_norm": 23.574596405029297, "learning_rate": 2.6151111111111117e-05, "loss": 1.0319, "step": 6623 }, { "epoch": 52.992, "grad_norm": 36.53861618041992, "learning_rate": 2.6146666666666668e-05, "loss": 0.7071, "step": 6624 }, { "epoch": 53.0, "grad_norm": 26.487382888793945, "learning_rate": 2.6142222222222223e-05, "loss": 0.8801, "step": 6625 }, { "epoch": 53.0, "eval_loss": 1.1603130102157593, "eval_map": 0.3805, "eval_map_50": 0.7013, "eval_map_75": 0.3677, "eval_map_Coverall": 0.6287, "eval_map_Face_Shield": 0.4471, "eval_map_Gloves": 0.2775, "eval_map_Goggles": 0.1813, "eval_map_Mask": 0.3677, "eval_map_large": 0.573, "eval_map_medium": 0.2505, "eval_map_small": 0.2922, "eval_mar_1": 0.3033, "eval_mar_10": 0.544, "eval_mar_100": 0.5589, "eval_mar_100_Coverall": 0.7556, "eval_mar_100_Face_Shield": 0.6941, "eval_mar_100_Gloves": 0.3918, "eval_mar_100_Goggles": 0.4781, "eval_mar_100_Mask": 0.475, "eval_mar_large": 0.6848, "eval_mar_medium": 0.4238, "eval_mar_small": 0.3821, "eval_runtime": 0.9219, "eval_samples_per_second": 31.455, "eval_steps_per_second": 2.169, "step": 6625 }, { "epoch": 53.008, "grad_norm": 38.27794647216797, "learning_rate": 
2.6137777777777778e-05, "loss": 0.8895, "step": 6626 }, { "epoch": 53.016, "grad_norm": 29.843730926513672, "learning_rate": 2.6133333333333333e-05, "loss": 0.97, "step": 6627 }, { "epoch": 53.024, "grad_norm": 18.870027542114258, "learning_rate": 2.612888888888889e-05, "loss": 1.0013, "step": 6628 }, { "epoch": 53.032, "grad_norm": 19.251901626586914, "learning_rate": 2.6124444444444446e-05, "loss": 1.1028, "step": 6629 }, { "epoch": 53.04, "grad_norm": 25.091079711914062, "learning_rate": 2.612e-05, "loss": 1.3028, "step": 6630 }, { "epoch": 53.048, "grad_norm": 26.260000228881836, "learning_rate": 2.6115555555555555e-05, "loss": 0.8825, "step": 6631 }, { "epoch": 53.056, "grad_norm": 26.93865966796875, "learning_rate": 2.6111111111111114e-05, "loss": 0.7764, "step": 6632 }, { "epoch": 53.064, "grad_norm": 21.13819122314453, "learning_rate": 2.610666666666667e-05, "loss": 1.1395, "step": 6633 }, { "epoch": 53.072, "grad_norm": 21.849348068237305, "learning_rate": 2.6102222222222223e-05, "loss": 0.9006, "step": 6634 }, { "epoch": 53.08, "grad_norm": 31.800979614257812, "learning_rate": 2.6097777777777775e-05, "loss": 1.1025, "step": 6635 }, { "epoch": 53.088, "grad_norm": 11.694580078125, "learning_rate": 2.6093333333333336e-05, "loss": 0.5532, "step": 6636 }, { "epoch": 53.096, "grad_norm": 25.8448429107666, "learning_rate": 2.608888888888889e-05, "loss": 1.2283, "step": 6637 }, { "epoch": 53.104, "grad_norm": 22.92031478881836, "learning_rate": 2.6084444444444446e-05, "loss": 0.626, "step": 6638 }, { "epoch": 53.112, "grad_norm": 16.510343551635742, "learning_rate": 2.6079999999999998e-05, "loss": 1.0327, "step": 6639 }, { "epoch": 53.12, "grad_norm": 18.91139030456543, "learning_rate": 2.607555555555556e-05, "loss": 0.7446, "step": 6640 }, { "epoch": 53.128, "grad_norm": 39.2826042175293, "learning_rate": 2.6071111111111114e-05, "loss": 1.0425, "step": 6641 }, { "epoch": 53.136, "grad_norm": 39.37729263305664, "learning_rate": 2.6066666666666666e-05, "loss": 
0.9231, "step": 6642 }, { "epoch": 53.144, "grad_norm": 43.60700988769531, "learning_rate": 2.606222222222222e-05, "loss": 0.9476, "step": 6643 }, { "epoch": 53.152, "grad_norm": 40.83744812011719, "learning_rate": 2.6057777777777782e-05, "loss": 0.9318, "step": 6644 }, { "epoch": 53.16, "grad_norm": 28.16140365600586, "learning_rate": 2.6053333333333333e-05, "loss": 1.2079, "step": 6645 }, { "epoch": 53.168, "grad_norm": 17.638866424560547, "learning_rate": 2.604888888888889e-05, "loss": 1.252, "step": 6646 }, { "epoch": 53.176, "grad_norm": 23.7025089263916, "learning_rate": 2.6044444444444443e-05, "loss": 2.9488, "step": 6647 }, { "epoch": 53.184, "grad_norm": 34.77648162841797, "learning_rate": 2.6040000000000005e-05, "loss": 1.0363, "step": 6648 }, { "epoch": 53.192, "grad_norm": 32.037818908691406, "learning_rate": 2.6035555555555556e-05, "loss": 0.9528, "step": 6649 }, { "epoch": 53.2, "grad_norm": 80.0222396850586, "learning_rate": 2.603111111111111e-05, "loss": 0.9447, "step": 6650 }, { "epoch": 53.208, "grad_norm": 28.089139938354492, "learning_rate": 2.6026666666666666e-05, "loss": 0.8249, "step": 6651 }, { "epoch": 53.216, "grad_norm": 19.51973533630371, "learning_rate": 2.6022222222222224e-05, "loss": 1.345, "step": 6652 }, { "epoch": 53.224, "grad_norm": 52.58165740966797, "learning_rate": 2.601777777777778e-05, "loss": 0.7867, "step": 6653 }, { "epoch": 53.232, "grad_norm": 19.65974235534668, "learning_rate": 2.6013333333333334e-05, "loss": 2.1332, "step": 6654 }, { "epoch": 53.24, "grad_norm": 31.886028289794922, "learning_rate": 2.600888888888889e-05, "loss": 0.9495, "step": 6655 }, { "epoch": 53.248, "grad_norm": 40.13784408569336, "learning_rate": 2.6004444444444447e-05, "loss": 0.8795, "step": 6656 }, { "epoch": 53.256, "grad_norm": 23.183305740356445, "learning_rate": 2.6000000000000002e-05, "loss": 0.9275, "step": 6657 }, { "epoch": 53.264, "grad_norm": 21.866077423095703, "learning_rate": 2.5995555555555557e-05, "loss": 1.1078, "step": 6658 
}, { "epoch": 53.272, "grad_norm": 31.45988655090332, "learning_rate": 2.599111111111111e-05, "loss": 0.985, "step": 6659 }, { "epoch": 53.28, "grad_norm": 58.74854278564453, "learning_rate": 2.598666666666667e-05, "loss": 1.4579, "step": 6660 }, { "epoch": 53.288, "grad_norm": 23.82440948486328, "learning_rate": 2.5982222222222225e-05, "loss": 0.9483, "step": 6661 }, { "epoch": 53.296, "grad_norm": 31.45026206970215, "learning_rate": 2.597777777777778e-05, "loss": 2.5, "step": 6662 }, { "epoch": 53.304, "grad_norm": 15.040565490722656, "learning_rate": 2.5973333333333334e-05, "loss": 1.2237, "step": 6663 }, { "epoch": 53.312, "grad_norm": 44.22246551513672, "learning_rate": 2.5968888888888892e-05, "loss": 1.3027, "step": 6664 }, { "epoch": 53.32, "grad_norm": 39.0490837097168, "learning_rate": 2.5964444444444447e-05, "loss": 0.9519, "step": 6665 }, { "epoch": 53.328, "grad_norm": 31.46180534362793, "learning_rate": 2.5960000000000002e-05, "loss": 1.1297, "step": 6666 }, { "epoch": 53.336, "grad_norm": 40.3535270690918, "learning_rate": 2.5955555555555554e-05, "loss": 0.7577, "step": 6667 }, { "epoch": 53.344, "grad_norm": 32.0141716003418, "learning_rate": 2.5951111111111115e-05, "loss": 0.9478, "step": 6668 }, { "epoch": 53.352, "grad_norm": 29.766721725463867, "learning_rate": 2.594666666666667e-05, "loss": 0.9091, "step": 6669 }, { "epoch": 53.36, "grad_norm": 17.127614974975586, "learning_rate": 2.5942222222222225e-05, "loss": 0.7863, "step": 6670 }, { "epoch": 53.368, "grad_norm": 17.515514373779297, "learning_rate": 2.5937777777777776e-05, "loss": 1.099, "step": 6671 }, { "epoch": 53.376, "grad_norm": 26.080720901489258, "learning_rate": 2.5933333333333338e-05, "loss": 1.13, "step": 6672 }, { "epoch": 53.384, "grad_norm": 15.416325569152832, "learning_rate": 2.5928888888888893e-05, "loss": 0.8829, "step": 6673 }, { "epoch": 53.392, "grad_norm": 16.905330657958984, "learning_rate": 2.5924444444444444e-05, "loss": 0.7263, "step": 6674 }, { "epoch": 53.4, 
"grad_norm": 30.796798706054688, "learning_rate": 2.592e-05, "loss": 1.0869, "step": 6675 }, { "epoch": 53.408, "grad_norm": 27.58052635192871, "learning_rate": 2.5915555555555554e-05, "loss": 1.0207, "step": 6676 }, { "epoch": 53.416, "grad_norm": 39.52075958251953, "learning_rate": 2.5911111111111112e-05, "loss": 1.9876, "step": 6677 }, { "epoch": 53.424, "grad_norm": 33.67447280883789, "learning_rate": 2.5906666666666667e-05, "loss": 1.0364, "step": 6678 }, { "epoch": 53.432, "grad_norm": 15.771596908569336, "learning_rate": 2.5902222222222222e-05, "loss": 0.7624, "step": 6679 }, { "epoch": 53.44, "grad_norm": 43.9600715637207, "learning_rate": 2.5897777777777777e-05, "loss": 0.9842, "step": 6680 }, { "epoch": 53.448, "grad_norm": 22.352127075195312, "learning_rate": 2.5893333333333335e-05, "loss": 1.2887, "step": 6681 }, { "epoch": 53.456, "grad_norm": 32.843685150146484, "learning_rate": 2.588888888888889e-05, "loss": 1.3445, "step": 6682 }, { "epoch": 53.464, "grad_norm": 34.579830169677734, "learning_rate": 2.5884444444444445e-05, "loss": 0.735, "step": 6683 }, { "epoch": 53.472, "grad_norm": 22.61783218383789, "learning_rate": 2.588e-05, "loss": 0.7475, "step": 6684 }, { "epoch": 53.48, "grad_norm": 20.903419494628906, "learning_rate": 2.5875555555555558e-05, "loss": 0.9637, "step": 6685 }, { "epoch": 53.488, "grad_norm": 14.03066349029541, "learning_rate": 2.5871111111111113e-05, "loss": 0.8761, "step": 6686 }, { "epoch": 53.496, "grad_norm": 20.193246841430664, "learning_rate": 2.5866666666666667e-05, "loss": 1.1017, "step": 6687 }, { "epoch": 53.504, "grad_norm": 27.62700080871582, "learning_rate": 2.5862222222222222e-05, "loss": 1.0369, "step": 6688 }, { "epoch": 53.512, "grad_norm": 11.602317810058594, "learning_rate": 2.585777777777778e-05, "loss": 0.8665, "step": 6689 }, { "epoch": 53.52, "grad_norm": 12.0299072265625, "learning_rate": 2.5853333333333335e-05, "loss": 0.8262, "step": 6690 }, { "epoch": 53.528, "grad_norm": 17.953462600708008, 
"learning_rate": 2.584888888888889e-05, "loss": 0.9503, "step": 6691 }, { "epoch": 53.536, "grad_norm": 28.077388763427734, "learning_rate": 2.5844444444444442e-05, "loss": 1.3959, "step": 6692 }, { "epoch": 53.544, "grad_norm": 63.32107925415039, "learning_rate": 2.5840000000000003e-05, "loss": 0.8283, "step": 6693 }, { "epoch": 53.552, "grad_norm": 27.69580841064453, "learning_rate": 2.5835555555555558e-05, "loss": 0.9956, "step": 6694 }, { "epoch": 53.56, "grad_norm": 20.595354080200195, "learning_rate": 2.5831111111111113e-05, "loss": 1.1847, "step": 6695 }, { "epoch": 53.568, "grad_norm": 24.99858283996582, "learning_rate": 2.5826666666666664e-05, "loss": 0.9398, "step": 6696 }, { "epoch": 53.576, "grad_norm": 37.24654769897461, "learning_rate": 2.5822222222222226e-05, "loss": 1.2654, "step": 6697 }, { "epoch": 53.584, "grad_norm": 31.69350814819336, "learning_rate": 2.581777777777778e-05, "loss": 0.6774, "step": 6698 }, { "epoch": 53.592, "grad_norm": 40.12624740600586, "learning_rate": 2.5813333333333332e-05, "loss": 1.2161, "step": 6699 }, { "epoch": 53.6, "grad_norm": 23.84035873413086, "learning_rate": 2.5808888888888887e-05, "loss": 1.0781, "step": 6700 }, { "epoch": 53.608, "grad_norm": 29.538005828857422, "learning_rate": 2.580444444444445e-05, "loss": 1.1179, "step": 6701 }, { "epoch": 53.616, "grad_norm": 27.919525146484375, "learning_rate": 2.58e-05, "loss": 0.9624, "step": 6702 }, { "epoch": 53.624, "grad_norm": 68.53205871582031, "learning_rate": 2.5795555555555555e-05, "loss": 1.848, "step": 6703 }, { "epoch": 53.632, "grad_norm": 30.17011833190918, "learning_rate": 2.579111111111111e-05, "loss": 0.7719, "step": 6704 }, { "epoch": 53.64, "grad_norm": 23.97882080078125, "learning_rate": 2.578666666666667e-05, "loss": 1.0088, "step": 6705 }, { "epoch": 53.648, "grad_norm": 85.4753646850586, "learning_rate": 2.5782222222222223e-05, "loss": 0.7032, "step": 6706 }, { "epoch": 53.656, "grad_norm": 23.73330307006836, "learning_rate": 
2.5777777777777778e-05, "loss": 0.9047, "step": 6707 }, { "epoch": 53.664, "grad_norm": 22.26752281188965, "learning_rate": 2.5773333333333333e-05, "loss": 0.7306, "step": 6708 }, { "epoch": 53.672, "grad_norm": 29.23431396484375, "learning_rate": 2.576888888888889e-05, "loss": 0.9803, "step": 6709 }, { "epoch": 53.68, "grad_norm": 46.61362838745117, "learning_rate": 2.5764444444444446e-05, "loss": 0.8847, "step": 6710 }, { "epoch": 53.688, "grad_norm": 23.234827041625977, "learning_rate": 2.576e-05, "loss": 0.9511, "step": 6711 }, { "epoch": 53.696, "grad_norm": 27.508203506469727, "learning_rate": 2.5755555555555556e-05, "loss": 1.4247, "step": 6712 }, { "epoch": 53.704, "grad_norm": 56.31447219848633, "learning_rate": 2.5751111111111114e-05, "loss": 1.1746, "step": 6713 }, { "epoch": 53.712, "grad_norm": 34.637596130371094, "learning_rate": 2.574666666666667e-05, "loss": 1.0338, "step": 6714 }, { "epoch": 53.72, "grad_norm": 25.889596939086914, "learning_rate": 2.5742222222222223e-05, "loss": 0.8036, "step": 6715 }, { "epoch": 53.728, "grad_norm": 53.41375732421875, "learning_rate": 2.573777777777778e-05, "loss": 1.4664, "step": 6716 }, { "epoch": 53.736, "grad_norm": 17.42182159423828, "learning_rate": 2.5733333333333337e-05, "loss": 1.1994, "step": 6717 }, { "epoch": 53.744, "grad_norm": 21.704368591308594, "learning_rate": 2.572888888888889e-05, "loss": 2.1528, "step": 6718 }, { "epoch": 53.752, "grad_norm": 25.48456382751465, "learning_rate": 2.5724444444444446e-05, "loss": 0.9567, "step": 6719 }, { "epoch": 53.76, "grad_norm": 39.38522720336914, "learning_rate": 2.572e-05, "loss": 1.1421, "step": 6720 }, { "epoch": 53.768, "grad_norm": 21.6005802154541, "learning_rate": 2.571555555555556e-05, "loss": 0.8731, "step": 6721 }, { "epoch": 53.776, "grad_norm": 46.39870071411133, "learning_rate": 2.5711111111111114e-05, "loss": 0.5678, "step": 6722 }, { "epoch": 53.784, "grad_norm": 19.8133602142334, "learning_rate": 2.570666666666667e-05, "loss": 0.8349, "step": 
6723 }, { "epoch": 53.792, "grad_norm": 28.266996383666992, "learning_rate": 2.570222222222222e-05, "loss": 0.868, "step": 6724 }, { "epoch": 53.8, "grad_norm": 45.81742477416992, "learning_rate": 2.5697777777777782e-05, "loss": 0.865, "step": 6725 }, { "epoch": 53.808, "grad_norm": 33.35280990600586, "learning_rate": 2.5693333333333337e-05, "loss": 1.0263, "step": 6726 }, { "epoch": 53.816, "grad_norm": 240.96534729003906, "learning_rate": 2.5688888888888892e-05, "loss": 1.3753, "step": 6727 }, { "epoch": 53.824, "grad_norm": 22.997844696044922, "learning_rate": 2.5684444444444443e-05, "loss": 1.1325, "step": 6728 }, { "epoch": 53.832, "grad_norm": 69.36747741699219, "learning_rate": 2.5679999999999998e-05, "loss": 1.0479, "step": 6729 }, { "epoch": 53.84, "grad_norm": 25.63211441040039, "learning_rate": 2.567555555555556e-05, "loss": 1.1731, "step": 6730 }, { "epoch": 53.848, "grad_norm": 43.79378128051758, "learning_rate": 2.567111111111111e-05, "loss": 1.062, "step": 6731 }, { "epoch": 53.856, "grad_norm": 17.99799919128418, "learning_rate": 2.5666666666666666e-05, "loss": 1.0457, "step": 6732 }, { "epoch": 53.864, "grad_norm": 34.09407424926758, "learning_rate": 2.566222222222222e-05, "loss": 1.4657, "step": 6733 }, { "epoch": 53.872, "grad_norm": 113.37450408935547, "learning_rate": 2.565777777777778e-05, "loss": 0.9325, "step": 6734 }, { "epoch": 53.88, "grad_norm": 361.75469970703125, "learning_rate": 2.5653333333333334e-05, "loss": 0.8462, "step": 6735 }, { "epoch": 53.888, "grad_norm": 28.636127471923828, "learning_rate": 2.564888888888889e-05, "loss": 0.8009, "step": 6736 }, { "epoch": 53.896, "grad_norm": 30.39713478088379, "learning_rate": 2.5644444444444444e-05, "loss": 0.9786, "step": 6737 }, { "epoch": 53.904, "grad_norm": 67.67996215820312, "learning_rate": 2.5640000000000002e-05, "loss": 1.4634, "step": 6738 }, { "epoch": 53.912, "grad_norm": 23.14980697631836, "learning_rate": 2.5635555555555557e-05, "loss": 0.9965, "step": 6739 }, { "epoch": 
53.92, "grad_norm": 48.62776184082031, "learning_rate": 2.563111111111111e-05, "loss": 1.3771, "step": 6740 }, { "epoch": 53.928, "grad_norm": 25.89461326599121, "learning_rate": 2.5626666666666666e-05, "loss": 0.7269, "step": 6741 }, { "epoch": 53.936, "grad_norm": 34.85398483276367, "learning_rate": 2.5622222222222225e-05, "loss": 1.3855, "step": 6742 }, { "epoch": 53.944, "grad_norm": 29.657875061035156, "learning_rate": 2.561777777777778e-05, "loss": 1.2284, "step": 6743 }, { "epoch": 53.952, "grad_norm": 17.64277458190918, "learning_rate": 2.5613333333333334e-05, "loss": 1.052, "step": 6744 }, { "epoch": 53.96, "grad_norm": 19.151174545288086, "learning_rate": 2.560888888888889e-05, "loss": 0.8298, "step": 6745 }, { "epoch": 53.968, "grad_norm": 38.889400482177734, "learning_rate": 2.5604444444444447e-05, "loss": 1.2328, "step": 6746 }, { "epoch": 53.976, "grad_norm": 18.77174949645996, "learning_rate": 2.5600000000000002e-05, "loss": 1.1124, "step": 6747 }, { "epoch": 53.984, "grad_norm": 110.83616638183594, "learning_rate": 2.5595555555555557e-05, "loss": 1.2576, "step": 6748 }, { "epoch": 53.992, "grad_norm": 30.992374420166016, "learning_rate": 2.559111111111111e-05, "loss": 0.8425, "step": 6749 }, { "epoch": 54.0, "grad_norm": 63.586326599121094, "learning_rate": 2.558666666666667e-05, "loss": 0.6931, "step": 6750 }, { "epoch": 54.0, "eval_loss": 1.0811247825622559, "eval_map": 0.4027, "eval_map_50": 0.7578, "eval_map_75": 0.3896, "eval_map_Coverall": 0.6012, "eval_map_Face_Shield": 0.4641, "eval_map_Gloves": 0.296, "eval_map_Goggles": 0.21, "eval_map_Mask": 0.4423, "eval_map_large": 0.5756, "eval_map_medium": 0.2813, "eval_map_small": 0.2863, "eval_mar_1": 0.3067, "eval_mar_10": 0.547, "eval_mar_100": 0.5573, "eval_mar_100_Coverall": 0.7511, "eval_mar_100_Face_Shield": 0.6588, "eval_mar_100_Gloves": 0.4115, "eval_mar_100_Goggles": 0.4437, "eval_mar_100_Mask": 0.5212, "eval_mar_large": 0.6592, "eval_mar_medium": 0.4224, "eval_mar_small": 0.3793, 
"eval_runtime": 0.9295, "eval_samples_per_second": 31.2, "eval_steps_per_second": 2.152, "step": 6750 }, { "epoch": 54.008, "grad_norm": 18.268701553344727, "learning_rate": 2.5582222222222225e-05, "loss": 1.1371, "step": 6751 }, { "epoch": 54.016, "grad_norm": 18.741491317749023, "learning_rate": 2.557777777777778e-05, "loss": 1.2344, "step": 6752 }, { "epoch": 54.024, "grad_norm": 21.2153263092041, "learning_rate": 2.557333333333333e-05, "loss": 1.1786, "step": 6753 }, { "epoch": 54.032, "grad_norm": 43.992881774902344, "learning_rate": 2.5568888888888893e-05, "loss": 0.9302, "step": 6754 }, { "epoch": 54.04, "grad_norm": 15.867815971374512, "learning_rate": 2.5564444444444448e-05, "loss": 0.8586, "step": 6755 }, { "epoch": 54.048, "grad_norm": 25.199478149414062, "learning_rate": 2.556e-05, "loss": 1.3162, "step": 6756 }, { "epoch": 54.056, "grad_norm": 26.249217987060547, "learning_rate": 2.5555555555555554e-05, "loss": 0.6855, "step": 6757 }, { "epoch": 54.064, "grad_norm": 30.126062393188477, "learning_rate": 2.5551111111111116e-05, "loss": 0.9321, "step": 6758 }, { "epoch": 54.072, "grad_norm": 12.022272109985352, "learning_rate": 2.5546666666666667e-05, "loss": 0.6834, "step": 6759 }, { "epoch": 54.08, "grad_norm": 29.841814041137695, "learning_rate": 2.5542222222222222e-05, "loss": 0.8264, "step": 6760 }, { "epoch": 54.088, "grad_norm": 25.637226104736328, "learning_rate": 2.5537777777777777e-05, "loss": 0.7516, "step": 6761 }, { "epoch": 54.096, "grad_norm": 19.03598976135254, "learning_rate": 2.553333333333334e-05, "loss": 1.0328, "step": 6762 }, { "epoch": 54.104, "grad_norm": 21.928022384643555, "learning_rate": 2.552888888888889e-05, "loss": 0.7207, "step": 6763 }, { "epoch": 54.112, "grad_norm": 36.954010009765625, "learning_rate": 2.5524444444444445e-05, "loss": 1.3482, "step": 6764 }, { "epoch": 54.12, "grad_norm": 15.582106590270996, "learning_rate": 2.552e-05, "loss": 1.1424, "step": 6765 }, { "epoch": 54.128, "grad_norm": 24.66378402709961, 
"learning_rate": 2.5515555555555558e-05, "loss": 0.7279, "step": 6766 }, { "epoch": 54.136, "grad_norm": 30.763885498046875, "learning_rate": 2.5511111111111113e-05, "loss": 0.9158, "step": 6767 }, { "epoch": 54.144, "grad_norm": 37.48677062988281, "learning_rate": 2.5506666666666668e-05, "loss": 0.8591, "step": 6768 }, { "epoch": 54.152, "grad_norm": 45.36506271362305, "learning_rate": 2.5502222222222222e-05, "loss": 0.7752, "step": 6769 }, { "epoch": 54.16, "grad_norm": 36.70094680786133, "learning_rate": 2.549777777777778e-05, "loss": 0.7518, "step": 6770 }, { "epoch": 54.168, "grad_norm": 37.89729690551758, "learning_rate": 2.5493333333333335e-05, "loss": 0.9826, "step": 6771 }, { "epoch": 54.176, "grad_norm": 41.60601043701172, "learning_rate": 2.548888888888889e-05, "loss": 0.996, "step": 6772 }, { "epoch": 54.184, "grad_norm": 30.3851318359375, "learning_rate": 2.5484444444444445e-05, "loss": 0.6611, "step": 6773 }, { "epoch": 54.192, "grad_norm": 21.323287963867188, "learning_rate": 2.5480000000000003e-05, "loss": 0.6437, "step": 6774 }, { "epoch": 54.2, "grad_norm": 13.27097225189209, "learning_rate": 2.5475555555555558e-05, "loss": 0.6468, "step": 6775 }, { "epoch": 54.208, "grad_norm": 28.553380966186523, "learning_rate": 2.5471111111111113e-05, "loss": 0.6609, "step": 6776 }, { "epoch": 54.216, "grad_norm": 22.4808292388916, "learning_rate": 2.5466666666666668e-05, "loss": 0.9732, "step": 6777 }, { "epoch": 54.224, "grad_norm": 23.863460540771484, "learning_rate": 2.5462222222222226e-05, "loss": 0.8987, "step": 6778 }, { "epoch": 54.232, "grad_norm": 25.312957763671875, "learning_rate": 2.545777777777778e-05, "loss": 1.2777, "step": 6779 }, { "epoch": 54.24, "grad_norm": 18.693613052368164, "learning_rate": 2.5453333333333336e-05, "loss": 1.0696, "step": 6780 }, { "epoch": 54.248, "grad_norm": 42.07122039794922, "learning_rate": 2.5448888888888887e-05, "loss": 0.6597, "step": 6781 }, { "epoch": 54.256, "grad_norm": 41.21282958984375, "learning_rate": 
2.5444444444444442e-05, "loss": 0.9107, "step": 6782 }, { "epoch": 54.264, "grad_norm": 24.447017669677734, "learning_rate": 2.5440000000000004e-05, "loss": 0.8297, "step": 6783 }, { "epoch": 54.272, "grad_norm": 28.061491012573242, "learning_rate": 2.543555555555556e-05, "loss": 0.8154, "step": 6784 }, { "epoch": 54.28, "grad_norm": 36.49888610839844, "learning_rate": 2.543111111111111e-05, "loss": 0.7114, "step": 6785 }, { "epoch": 54.288, "grad_norm": 20.912038803100586, "learning_rate": 2.5426666666666665e-05, "loss": 1.071, "step": 6786 }, { "epoch": 54.296, "grad_norm": 29.659448623657227, "learning_rate": 2.5422222222222227e-05, "loss": 0.9103, "step": 6787 }, { "epoch": 54.304, "grad_norm": 17.632835388183594, "learning_rate": 2.5417777777777778e-05, "loss": 2.3099, "step": 6788 }, { "epoch": 54.312, "grad_norm": 14.149951934814453, "learning_rate": 2.5413333333333333e-05, "loss": 1.0167, "step": 6789 }, { "epoch": 54.32, "grad_norm": 23.518325805664062, "learning_rate": 2.5408888888888888e-05, "loss": 1.0793, "step": 6790 }, { "epoch": 54.328, "grad_norm": 37.29114532470703, "learning_rate": 2.5404444444444446e-05, "loss": 1.19, "step": 6791 }, { "epoch": 54.336, "grad_norm": 54.87783432006836, "learning_rate": 2.54e-05, "loss": 0.9584, "step": 6792 }, { "epoch": 54.344, "grad_norm": 29.18644905090332, "learning_rate": 2.5395555555555556e-05, "loss": 1.2253, "step": 6793 }, { "epoch": 54.352, "grad_norm": 19.929302215576172, "learning_rate": 2.539111111111111e-05, "loss": 1.1096, "step": 6794 }, { "epoch": 54.36, "grad_norm": 59.230377197265625, "learning_rate": 2.538666666666667e-05, "loss": 1.0528, "step": 6795 }, { "epoch": 54.368, "grad_norm": 45.622806549072266, "learning_rate": 2.5382222222222224e-05, "loss": 0.8711, "step": 6796 }, { "epoch": 54.376, "grad_norm": 30.36219024658203, "learning_rate": 2.537777777777778e-05, "loss": 1.247, "step": 6797 }, { "epoch": 54.384, "grad_norm": 11.709420204162598, "learning_rate": 2.5373333333333333e-05, 
"loss": 1.0071, "step": 6798 }, { "epoch": 54.392, "grad_norm": 24.03416633605957, "learning_rate": 2.536888888888889e-05, "loss": 1.0675, "step": 6799 }, { "epoch": 54.4, "grad_norm": 14.262415885925293, "learning_rate": 2.5364444444444446e-05, "loss": 0.9034, "step": 6800 }, { "epoch": 54.408, "grad_norm": 20.87900733947754, "learning_rate": 2.536e-05, "loss": 0.7498, "step": 6801 }, { "epoch": 54.416, "grad_norm": 17.94904327392578, "learning_rate": 2.5355555555555556e-05, "loss": 0.6646, "step": 6802 }, { "epoch": 54.424, "grad_norm": 40.3323860168457, "learning_rate": 2.5351111111111114e-05, "loss": 0.9781, "step": 6803 }, { "epoch": 54.432, "grad_norm": 25.847131729125977, "learning_rate": 2.534666666666667e-05, "loss": 1.0746, "step": 6804 }, { "epoch": 54.44, "grad_norm": 24.554025650024414, "learning_rate": 2.5342222222222224e-05, "loss": 1.6105, "step": 6805 }, { "epoch": 54.448, "grad_norm": 25.710294723510742, "learning_rate": 2.5337777777777775e-05, "loss": 1.1815, "step": 6806 }, { "epoch": 54.456, "grad_norm": 18.969898223876953, "learning_rate": 2.5333333333333337e-05, "loss": 0.9246, "step": 6807 }, { "epoch": 54.464, "grad_norm": 41.52224349975586, "learning_rate": 2.5328888888888892e-05, "loss": 0.8117, "step": 6808 }, { "epoch": 54.472, "grad_norm": 29.44744110107422, "learning_rate": 2.5324444444444447e-05, "loss": 1.0182, "step": 6809 }, { "epoch": 54.48, "grad_norm": 17.045032501220703, "learning_rate": 2.5319999999999998e-05, "loss": 1.277, "step": 6810 }, { "epoch": 54.488, "grad_norm": 22.019683837890625, "learning_rate": 2.531555555555556e-05, "loss": 0.9129, "step": 6811 }, { "epoch": 54.496, "grad_norm": 20.59288787841797, "learning_rate": 2.5311111111111115e-05, "loss": 0.7827, "step": 6812 }, { "epoch": 54.504, "grad_norm": 35.86687469482422, "learning_rate": 2.5306666666666666e-05, "loss": 0.996, "step": 6813 }, { "epoch": 54.512, "grad_norm": 101.65962982177734, "learning_rate": 2.530222222222222e-05, "loss": 0.7765, "step": 6814 }, 
{ "epoch": 54.52, "grad_norm": 45.124534606933594, "learning_rate": 2.5297777777777783e-05, "loss": 0.95, "step": 6815 }, { "epoch": 54.528, "grad_norm": 21.941749572753906, "learning_rate": 2.5293333333333334e-05, "loss": 1.1881, "step": 6816 }, { "epoch": 54.536, "grad_norm": 29.453662872314453, "learning_rate": 2.528888888888889e-05, "loss": 1.0749, "step": 6817 }, { "epoch": 54.544, "grad_norm": 20.232471466064453, "learning_rate": 2.5284444444444444e-05, "loss": 1.3664, "step": 6818 }, { "epoch": 54.552, "grad_norm": 46.19097900390625, "learning_rate": 2.5280000000000005e-05, "loss": 1.3308, "step": 6819 }, { "epoch": 54.56, "grad_norm": 47.74660873413086, "learning_rate": 2.5275555555555557e-05, "loss": 0.8082, "step": 6820 }, { "epoch": 54.568, "grad_norm": 33.29015350341797, "learning_rate": 2.527111111111111e-05, "loss": 1.0075, "step": 6821 }, { "epoch": 54.576, "grad_norm": 19.446855545043945, "learning_rate": 2.5266666666666666e-05, "loss": 0.9788, "step": 6822 }, { "epoch": 54.584, "grad_norm": 25.823036193847656, "learning_rate": 2.5262222222222225e-05, "loss": 0.5484, "step": 6823 }, { "epoch": 54.592, "grad_norm": 33.08415985107422, "learning_rate": 2.525777777777778e-05, "loss": 0.6906, "step": 6824 }, { "epoch": 54.6, "grad_norm": 126.75607299804688, "learning_rate": 2.5253333333333334e-05, "loss": 2.0961, "step": 6825 }, { "epoch": 54.608, "grad_norm": 22.4831600189209, "learning_rate": 2.524888888888889e-05, "loss": 0.7908, "step": 6826 }, { "epoch": 54.616, "grad_norm": 21.561124801635742, "learning_rate": 2.5244444444444447e-05, "loss": 1.0937, "step": 6827 }, { "epoch": 54.624, "grad_norm": 20.32414436340332, "learning_rate": 2.5240000000000002e-05, "loss": 0.8085, "step": 6828 }, { "epoch": 54.632, "grad_norm": 24.26671028137207, "learning_rate": 2.5235555555555557e-05, "loss": 1.0802, "step": 6829 }, { "epoch": 54.64, "grad_norm": 12.190409660339355, "learning_rate": 2.5231111111111112e-05, "loss": 1.1056, "step": 6830 }, { "epoch": 54.648, 
"grad_norm": 21.443880081176758, "learning_rate": 2.5226666666666663e-05, "loss": 0.9373, "step": 6831 }, { "epoch": 54.656, "grad_norm": 43.616512298583984, "learning_rate": 2.5222222222222225e-05, "loss": 1.1939, "step": 6832 }, { "epoch": 54.664, "grad_norm": 24.96826934814453, "learning_rate": 2.521777777777778e-05, "loss": 0.923, "step": 6833 }, { "epoch": 54.672, "grad_norm": 14.431746482849121, "learning_rate": 2.5213333333333335e-05, "loss": 1.1432, "step": 6834 }, { "epoch": 54.68, "grad_norm": 35.09876251220703, "learning_rate": 2.5208888888888886e-05, "loss": 1.016, "step": 6835 }, { "epoch": 54.688, "grad_norm": 114.24858856201172, "learning_rate": 2.5204444444444448e-05, "loss": 0.9711, "step": 6836 }, { "epoch": 54.696, "grad_norm": 26.597450256347656, "learning_rate": 2.5200000000000003e-05, "loss": 1.0376, "step": 6837 }, { "epoch": 54.704, "grad_norm": 26.114456176757812, "learning_rate": 2.5195555555555554e-05, "loss": 0.9188, "step": 6838 }, { "epoch": 54.712, "grad_norm": 17.215158462524414, "learning_rate": 2.519111111111111e-05, "loss": 1.0119, "step": 6839 }, { "epoch": 54.72, "grad_norm": 43.22956085205078, "learning_rate": 2.518666666666667e-05, "loss": 0.9343, "step": 6840 }, { "epoch": 54.728, "grad_norm": 36.05755615234375, "learning_rate": 2.5182222222222222e-05, "loss": 1.0636, "step": 6841 }, { "epoch": 54.736, "grad_norm": 30.515087127685547, "learning_rate": 2.5177777777777777e-05, "loss": 0.9058, "step": 6842 }, { "epoch": 54.744, "grad_norm": 26.082752227783203, "learning_rate": 2.5173333333333332e-05, "loss": 1.0562, "step": 6843 }, { "epoch": 54.752, "grad_norm": 20.421300888061523, "learning_rate": 2.5168888888888893e-05, "loss": 1.0175, "step": 6844 }, { "epoch": 54.76, "grad_norm": 39.87616729736328, "learning_rate": 2.5164444444444445e-05, "loss": 1.0535, "step": 6845 }, { "epoch": 54.768, "grad_norm": 16.501041412353516, "learning_rate": 2.516e-05, "loss": 1.3263, "step": 6846 }, { "epoch": 54.776, "grad_norm": 
34.434913635253906, "learning_rate": 2.5155555555555555e-05, "loss": 1.9214, "step": 6847 }, { "epoch": 54.784, "grad_norm": 146.43988037109375, "learning_rate": 2.5151111111111113e-05, "loss": 0.8127, "step": 6848 }, { "epoch": 54.792, "grad_norm": 18.286205291748047, "learning_rate": 2.5146666666666668e-05, "loss": 0.9405, "step": 6849 }, { "epoch": 54.8, "grad_norm": 22.621198654174805, "learning_rate": 2.5142222222222222e-05, "loss": 1.2423, "step": 6850 }, { "epoch": 54.808, "grad_norm": 43.8050537109375, "learning_rate": 2.5137777777777777e-05, "loss": 1.3856, "step": 6851 }, { "epoch": 54.816, "grad_norm": 32.783180236816406, "learning_rate": 2.5133333333333336e-05, "loss": 0.7398, "step": 6852 }, { "epoch": 54.824, "grad_norm": 35.61341094970703, "learning_rate": 2.512888888888889e-05, "loss": 0.8932, "step": 6853 }, { "epoch": 54.832, "grad_norm": 22.40219497680664, "learning_rate": 2.5124444444444445e-05, "loss": 1.1998, "step": 6854 }, { "epoch": 54.84, "grad_norm": 178.51600646972656, "learning_rate": 2.512e-05, "loss": 0.8622, "step": 6855 }, { "epoch": 54.848, "grad_norm": 22.373516082763672, "learning_rate": 2.5115555555555558e-05, "loss": 1.1229, "step": 6856 }, { "epoch": 54.856, "grad_norm": 33.34591293334961, "learning_rate": 2.5111111111111113e-05, "loss": 1.215, "step": 6857 }, { "epoch": 54.864, "grad_norm": 51.39658737182617, "learning_rate": 2.5106666666666668e-05, "loss": 1.8643, "step": 6858 }, { "epoch": 54.872, "grad_norm": 22.54905891418457, "learning_rate": 2.5102222222222223e-05, "loss": 1.0045, "step": 6859 }, { "epoch": 54.88, "grad_norm": 37.732574462890625, "learning_rate": 2.509777777777778e-05, "loss": 0.9887, "step": 6860 }, { "epoch": 54.888, "grad_norm": 35.18236541748047, "learning_rate": 2.5093333333333336e-05, "loss": 1.428, "step": 6861 }, { "epoch": 54.896, "grad_norm": 24.35227394104004, "learning_rate": 2.508888888888889e-05, "loss": 1.1565, "step": 6862 }, { "epoch": 54.904, "grad_norm": 21.62031364440918, 
"learning_rate": 2.5084444444444442e-05, "loss": 1.7452, "step": 6863 }, { "epoch": 54.912, "grad_norm": 39.629608154296875, "learning_rate": 2.5080000000000004e-05, "loss": 0.9133, "step": 6864 }, { "epoch": 54.92, "grad_norm": 78.05783081054688, "learning_rate": 2.507555555555556e-05, "loss": 0.5795, "step": 6865 }, { "epoch": 54.928, "grad_norm": 32.602413177490234, "learning_rate": 2.5071111111111114e-05, "loss": 1.0135, "step": 6866 }, { "epoch": 54.936, "grad_norm": 21.880382537841797, "learning_rate": 2.5066666666666665e-05, "loss": 0.9841, "step": 6867 }, { "epoch": 54.944, "grad_norm": 33.12051773071289, "learning_rate": 2.5062222222222227e-05, "loss": 1.0724, "step": 6868 }, { "epoch": 54.952, "grad_norm": 25.5601863861084, "learning_rate": 2.505777777777778e-05, "loss": 1.0189, "step": 6869 }, { "epoch": 54.96, "grad_norm": 29.491119384765625, "learning_rate": 2.5053333333333333e-05, "loss": 1.0093, "step": 6870 }, { "epoch": 54.968, "grad_norm": 43.2650032043457, "learning_rate": 2.5048888888888888e-05, "loss": 1.3014, "step": 6871 }, { "epoch": 54.976, "grad_norm": 16.03611183166504, "learning_rate": 2.504444444444445e-05, "loss": 3.156, "step": 6872 }, { "epoch": 54.984, "grad_norm": 36.26527404785156, "learning_rate": 2.504e-05, "loss": 1.0981, "step": 6873 }, { "epoch": 54.992, "grad_norm": 40.84269332885742, "learning_rate": 2.5035555555555556e-05, "loss": 1.03, "step": 6874 }, { "epoch": 55.0, "grad_norm": 15.686681747436523, "learning_rate": 2.503111111111111e-05, "loss": 0.599, "step": 6875 }, { "epoch": 55.0, "eval_loss": 1.0178303718566895, "eval_map": 0.4117, "eval_map_50": 0.7519, "eval_map_75": 0.4183, "eval_map_Coverall": 0.6093, "eval_map_Face_Shield": 0.4708, "eval_map_Gloves": 0.351, "eval_map_Goggles": 0.1906, "eval_map_Mask": 0.4366, "eval_map_large": 0.5766, "eval_map_medium": 0.283, "eval_map_small": 0.3688, "eval_mar_1": 0.31, "eval_mar_10": 0.5621, "eval_mar_100": 0.5728, "eval_mar_100_Coverall": 0.7511, 
"eval_mar_100_Face_Shield": 0.7059, "eval_mar_100_Gloves": 0.4672, "eval_mar_100_Goggles": 0.4281, "eval_mar_100_Mask": 0.5115, "eval_mar_large": 0.6912, "eval_mar_medium": 0.479, "eval_mar_small": 0.4186, "eval_runtime": 0.926, "eval_samples_per_second": 31.317, "eval_steps_per_second": 2.16, "step": 6875 }, { "epoch": 55.008, "grad_norm": 19.825979232788086, "learning_rate": 2.5026666666666672e-05, "loss": 1.2984, "step": 6876 }, { "epoch": 55.016, "grad_norm": 29.10738754272461, "learning_rate": 2.5022222222222224e-05, "loss": 1.0813, "step": 6877 }, { "epoch": 55.024, "grad_norm": 31.702917098999023, "learning_rate": 2.501777777777778e-05, "loss": 1.1516, "step": 6878 }, { "epoch": 55.032, "grad_norm": 18.20381736755371, "learning_rate": 2.5013333333333333e-05, "loss": 0.7839, "step": 6879 }, { "epoch": 55.04, "grad_norm": 9.597158432006836, "learning_rate": 2.500888888888889e-05, "loss": 1.2179, "step": 6880 }, { "epoch": 55.048, "grad_norm": 19.450681686401367, "learning_rate": 2.5004444444444446e-05, "loss": 0.9931, "step": 6881 }, { "epoch": 55.056, "grad_norm": 26.863052368164062, "learning_rate": 2.5e-05, "loss": 0.9267, "step": 6882 }, { "epoch": 55.064, "grad_norm": 30.075654983520508, "learning_rate": 2.4995555555555556e-05, "loss": 1.2034, "step": 6883 }, { "epoch": 55.072, "grad_norm": 16.1304931640625, "learning_rate": 2.499111111111111e-05, "loss": 1.0971, "step": 6884 }, { "epoch": 55.08, "grad_norm": 25.229650497436523, "learning_rate": 2.4986666666666666e-05, "loss": 0.9005, "step": 6885 }, { "epoch": 55.088, "grad_norm": 37.002838134765625, "learning_rate": 2.4982222222222224e-05, "loss": 1.0243, "step": 6886 }, { "epoch": 55.096, "grad_norm": 27.505592346191406, "learning_rate": 2.497777777777778e-05, "loss": 1.2186, "step": 6887 }, { "epoch": 55.104, "grad_norm": 19.847623825073242, "learning_rate": 2.4973333333333334e-05, "loss": 1.1529, "step": 6888 }, { "epoch": 55.112, "grad_norm": 15.204727172851562, "learning_rate": 
2.496888888888889e-05, "loss": 1.3089, "step": 6889 }, { "epoch": 55.12, "grad_norm": 15.308866500854492, "learning_rate": 2.4964444444444447e-05, "loss": 0.6294, "step": 6890 }, { "epoch": 55.128, "grad_norm": 42.54683303833008, "learning_rate": 2.496e-05, "loss": 1.1116, "step": 6891 }, { "epoch": 55.136, "grad_norm": 24.88314437866211, "learning_rate": 2.4955555555555556e-05, "loss": 0.8399, "step": 6892 }, { "epoch": 55.144, "grad_norm": 21.79567527770996, "learning_rate": 2.495111111111111e-05, "loss": 0.8032, "step": 6893 }, { "epoch": 55.152, "grad_norm": 21.914045333862305, "learning_rate": 2.494666666666667e-05, "loss": 0.9752, "step": 6894 }, { "epoch": 55.16, "grad_norm": 32.045387268066406, "learning_rate": 2.494222222222222e-05, "loss": 1.7378, "step": 6895 }, { "epoch": 55.168, "grad_norm": 37.168907165527344, "learning_rate": 2.493777777777778e-05, "loss": 1.0337, "step": 6896 }, { "epoch": 55.176, "grad_norm": 28.189382553100586, "learning_rate": 2.4933333333333334e-05, "loss": 1.0285, "step": 6897 }, { "epoch": 55.184, "grad_norm": 56.63614273071289, "learning_rate": 2.492888888888889e-05, "loss": 0.5695, "step": 6898 }, { "epoch": 55.192, "grad_norm": 58.09559631347656, "learning_rate": 2.4924444444444444e-05, "loss": 0.9041, "step": 6899 }, { "epoch": 55.2, "grad_norm": 17.206472396850586, "learning_rate": 2.4920000000000002e-05, "loss": 0.9655, "step": 6900 }, { "epoch": 55.208, "grad_norm": 25.94927215576172, "learning_rate": 2.4915555555555557e-05, "loss": 1.0756, "step": 6901 }, { "epoch": 55.216, "grad_norm": 25.41084098815918, "learning_rate": 2.491111111111111e-05, "loss": 1.0207, "step": 6902 }, { "epoch": 55.224, "grad_norm": 18.882301330566406, "learning_rate": 2.4906666666666666e-05, "loss": 0.88, "step": 6903 }, { "epoch": 55.232, "grad_norm": 25.948713302612305, "learning_rate": 2.4902222222222225e-05, "loss": 1.061, "step": 6904 }, { "epoch": 55.24, "grad_norm": 23.624797821044922, "learning_rate": 2.489777777777778e-05, "loss": 
1.1814, "step": 6905 }, { "epoch": 55.248, "grad_norm": 17.540313720703125, "learning_rate": 2.4893333333333334e-05, "loss": 0.9807, "step": 6906 }, { "epoch": 55.256, "grad_norm": 30.75100326538086, "learning_rate": 2.488888888888889e-05, "loss": 0.8591, "step": 6907 }, { "epoch": 55.264, "grad_norm": 15.314314842224121, "learning_rate": 2.4884444444444448e-05, "loss": 1.0037, "step": 6908 }, { "epoch": 55.272, "grad_norm": 40.09922409057617, "learning_rate": 2.488e-05, "loss": 1.1253, "step": 6909 }, { "epoch": 55.28, "grad_norm": 18.918682098388672, "learning_rate": 2.4875555555555557e-05, "loss": 0.8956, "step": 6910 }, { "epoch": 55.288, "grad_norm": 38.57953643798828, "learning_rate": 2.4871111111111112e-05, "loss": 1.2223, "step": 6911 }, { "epoch": 55.296, "grad_norm": 61.790924072265625, "learning_rate": 2.486666666666667e-05, "loss": 1.0664, "step": 6912 }, { "epoch": 55.304, "grad_norm": 26.023235321044922, "learning_rate": 2.4862222222222222e-05, "loss": 0.9112, "step": 6913 }, { "epoch": 55.312, "grad_norm": 19.748777389526367, "learning_rate": 2.485777777777778e-05, "loss": 1.3211, "step": 6914 }, { "epoch": 55.32, "grad_norm": 21.508230209350586, "learning_rate": 2.4853333333333335e-05, "loss": 0.678, "step": 6915 }, { "epoch": 55.328, "grad_norm": 26.772069931030273, "learning_rate": 2.484888888888889e-05, "loss": 1.1909, "step": 6916 }, { "epoch": 55.336, "grad_norm": 26.34773063659668, "learning_rate": 2.4844444444444444e-05, "loss": 1.339, "step": 6917 }, { "epoch": 55.344, "grad_norm": 32.38795852661133, "learning_rate": 2.4840000000000003e-05, "loss": 1.1196, "step": 6918 }, { "epoch": 55.352, "grad_norm": 17.776968002319336, "learning_rate": 2.4835555555555558e-05, "loss": 0.951, "step": 6919 }, { "epoch": 55.36, "grad_norm": 33.50431823730469, "learning_rate": 2.4831111111111112e-05, "loss": 0.6614, "step": 6920 }, { "epoch": 55.368, "grad_norm": 12.45333480834961, "learning_rate": 2.4826666666666667e-05, "loss": 0.8731, "step": 6921 }, { 
"epoch": 55.376, "grad_norm": 24.72174835205078, "learning_rate": 2.4822222222222225e-05, "loss": 1.024, "step": 6922 }, { "epoch": 55.384, "grad_norm": 19.628929138183594, "learning_rate": 2.481777777777778e-05, "loss": 0.6258, "step": 6923 }, { "epoch": 55.392, "grad_norm": 15.27670955657959, "learning_rate": 2.4813333333333335e-05, "loss": 0.7229, "step": 6924 }, { "epoch": 55.4, "grad_norm": 22.687498092651367, "learning_rate": 2.480888888888889e-05, "loss": 0.8239, "step": 6925 }, { "epoch": 55.408, "grad_norm": 17.946758270263672, "learning_rate": 2.4804444444444448e-05, "loss": 0.7479, "step": 6926 }, { "epoch": 55.416, "grad_norm": 34.96058654785156, "learning_rate": 2.48e-05, "loss": 1.1582, "step": 6927 }, { "epoch": 55.424, "grad_norm": 88.70795440673828, "learning_rate": 2.4795555555555558e-05, "loss": 1.4881, "step": 6928 }, { "epoch": 55.432, "grad_norm": 94.5595703125, "learning_rate": 2.4791111111111113e-05, "loss": 1.3718, "step": 6929 }, { "epoch": 55.44, "grad_norm": 57.761898040771484, "learning_rate": 2.4786666666666668e-05, "loss": 0.9905, "step": 6930 }, { "epoch": 55.448, "grad_norm": 24.618574142456055, "learning_rate": 2.4782222222222222e-05, "loss": 1.551, "step": 6931 }, { "epoch": 55.456, "grad_norm": 25.100555419921875, "learning_rate": 2.477777777777778e-05, "loss": 0.933, "step": 6932 }, { "epoch": 55.464, "grad_norm": 50.52394485473633, "learning_rate": 2.4773333333333336e-05, "loss": 2.8186, "step": 6933 }, { "epoch": 55.472, "grad_norm": 28.933086395263672, "learning_rate": 2.4768888888888887e-05, "loss": 1.1286, "step": 6934 }, { "epoch": 55.48, "grad_norm": 15.80494213104248, "learning_rate": 2.4764444444444445e-05, "loss": 0.7554, "step": 6935 }, { "epoch": 55.488, "grad_norm": 29.111347198486328, "learning_rate": 2.476e-05, "loss": 0.9224, "step": 6936 }, { "epoch": 55.496, "grad_norm": 96.76626586914062, "learning_rate": 2.475555555555556e-05, "loss": 0.7722, "step": 6937 }, { "epoch": 55.504, "grad_norm": 16.81345558166504, 
"learning_rate": 2.475111111111111e-05, "loss": 0.7034, "step": 6938 }, { "epoch": 55.512, "grad_norm": 20.694372177124023, "learning_rate": 2.4746666666666668e-05, "loss": 0.7574, "step": 6939 }, { "epoch": 55.52, "grad_norm": 57.17503356933594, "learning_rate": 2.4742222222222223e-05, "loss": 0.8202, "step": 6940 }, { "epoch": 55.528, "grad_norm": 31.12674903869629, "learning_rate": 2.4737777777777778e-05, "loss": 0.8615, "step": 6941 }, { "epoch": 55.536, "grad_norm": 26.17009162902832, "learning_rate": 2.4733333333333333e-05, "loss": 1.2145, "step": 6942 }, { "epoch": 55.544, "grad_norm": 29.679290771484375, "learning_rate": 2.472888888888889e-05, "loss": 0.8605, "step": 6943 }, { "epoch": 55.552, "grad_norm": 36.95155715942383, "learning_rate": 2.4724444444444446e-05, "loss": 1.2415, "step": 6944 }, { "epoch": 55.56, "grad_norm": 22.725072860717773, "learning_rate": 2.472e-05, "loss": 1.1148, "step": 6945 }, { "epoch": 55.568, "grad_norm": 43.38055419921875, "learning_rate": 2.4715555555555555e-05, "loss": 0.9221, "step": 6946 }, { "epoch": 55.576, "grad_norm": 29.40308380126953, "learning_rate": 2.4711111111111114e-05, "loss": 2.9121, "step": 6947 }, { "epoch": 55.584, "grad_norm": 14.108345031738281, "learning_rate": 2.470666666666667e-05, "loss": 0.8374, "step": 6948 }, { "epoch": 55.592, "grad_norm": 43.6452522277832, "learning_rate": 2.4702222222222223e-05, "loss": 1.3213, "step": 6949 }, { "epoch": 55.6, "grad_norm": 22.490421295166016, "learning_rate": 2.4697777777777778e-05, "loss": 1.2878, "step": 6950 }, { "epoch": 55.608, "grad_norm": 40.032588958740234, "learning_rate": 2.4693333333333336e-05, "loss": 0.8572, "step": 6951 }, { "epoch": 55.616, "grad_norm": 14.716216087341309, "learning_rate": 2.4688888888888888e-05, "loss": 0.9613, "step": 6952 }, { "epoch": 55.624, "grad_norm": 30.011693954467773, "learning_rate": 2.4684444444444446e-05, "loss": 1.3468, "step": 6953 }, { "epoch": 55.632, "grad_norm": 26.728837966918945, "learning_rate": 2.468e-05, 
"loss": 1.0208, "step": 6954 }, { "epoch": 55.64, "grad_norm": 30.831195831298828, "learning_rate": 2.4675555555555556e-05, "loss": 0.8042, "step": 6955 }, { "epoch": 55.648, "grad_norm": 19.905847549438477, "learning_rate": 2.467111111111111e-05, "loss": 1.1315, "step": 6956 }, { "epoch": 55.656, "grad_norm": 14.772736549377441, "learning_rate": 2.466666666666667e-05, "loss": 1.1531, "step": 6957 }, { "epoch": 55.664, "grad_norm": 46.270206451416016, "learning_rate": 2.4662222222222224e-05, "loss": 0.8496, "step": 6958 }, { "epoch": 55.672, "grad_norm": 79.50100708007812, "learning_rate": 2.465777777777778e-05, "loss": 0.975, "step": 6959 }, { "epoch": 55.68, "grad_norm": 31.630338668823242, "learning_rate": 2.4653333333333333e-05, "loss": 1.0112, "step": 6960 }, { "epoch": 55.688, "grad_norm": 28.640514373779297, "learning_rate": 2.464888888888889e-05, "loss": 0.7311, "step": 6961 }, { "epoch": 55.696, "grad_norm": 17.223289489746094, "learning_rate": 2.4644444444444446e-05, "loss": 0.6277, "step": 6962 }, { "epoch": 55.704, "grad_norm": 31.367708206176758, "learning_rate": 2.464e-05, "loss": 0.983, "step": 6963 }, { "epoch": 55.712, "grad_norm": 24.094079971313477, "learning_rate": 2.4635555555555556e-05, "loss": 1.0145, "step": 6964 }, { "epoch": 55.72, "grad_norm": 19.557994842529297, "learning_rate": 2.4631111111111114e-05, "loss": 0.8148, "step": 6965 }, { "epoch": 55.728, "grad_norm": 50.35662841796875, "learning_rate": 2.4626666666666666e-05, "loss": 0.9237, "step": 6966 }, { "epoch": 55.736, "grad_norm": 40.69440460205078, "learning_rate": 2.4622222222222224e-05, "loss": 2.3426, "step": 6967 }, { "epoch": 55.744, "grad_norm": 34.77618408203125, "learning_rate": 2.461777777777778e-05, "loss": 0.7333, "step": 6968 }, { "epoch": 55.752, "grad_norm": 36.93511199951172, "learning_rate": 2.4613333333333337e-05, "loss": 0.9054, "step": 6969 }, { "epoch": 55.76, "grad_norm": 44.709041595458984, "learning_rate": 2.460888888888889e-05, "loss": 0.7273, "step": 6970 
}, { "epoch": 55.768, "grad_norm": 20.638399124145508, "learning_rate": 2.4604444444444447e-05, "loss": 1.2952, "step": 6971 }, { "epoch": 55.776, "grad_norm": 26.169170379638672, "learning_rate": 2.46e-05, "loss": 0.9185, "step": 6972 }, { "epoch": 55.784, "grad_norm": 20.426950454711914, "learning_rate": 2.4595555555555556e-05, "loss": 0.6257, "step": 6973 }, { "epoch": 55.792, "grad_norm": 40.56918716430664, "learning_rate": 2.459111111111111e-05, "loss": 0.9845, "step": 6974 }, { "epoch": 55.8, "grad_norm": 20.666385650634766, "learning_rate": 2.458666666666667e-05, "loss": 0.8877, "step": 6975 }, { "epoch": 55.808, "grad_norm": 57.340606689453125, "learning_rate": 2.4582222222222224e-05, "loss": 1.0778, "step": 6976 }, { "epoch": 55.816, "grad_norm": 48.379234313964844, "learning_rate": 2.457777777777778e-05, "loss": 0.9926, "step": 6977 }, { "epoch": 55.824, "grad_norm": 34.37273025512695, "learning_rate": 2.4573333333333334e-05, "loss": 0.8535, "step": 6978 }, { "epoch": 55.832, "grad_norm": 16.41437339782715, "learning_rate": 2.4568888888888892e-05, "loss": 0.9917, "step": 6979 }, { "epoch": 55.84, "grad_norm": 38.98385238647461, "learning_rate": 2.4564444444444444e-05, "loss": 1.0704, "step": 6980 }, { "epoch": 55.848, "grad_norm": 21.993724822998047, "learning_rate": 2.4560000000000002e-05, "loss": 0.9411, "step": 6981 }, { "epoch": 55.856, "grad_norm": 20.638141632080078, "learning_rate": 2.4555555555555557e-05, "loss": 0.9601, "step": 6982 }, { "epoch": 55.864, "grad_norm": 26.13624382019043, "learning_rate": 2.4551111111111115e-05, "loss": 1.0782, "step": 6983 }, { "epoch": 55.872, "grad_norm": 23.461231231689453, "learning_rate": 2.4546666666666667e-05, "loss": 0.9365, "step": 6984 }, { "epoch": 55.88, "grad_norm": 25.479490280151367, "learning_rate": 2.4542222222222225e-05, "loss": 1.209, "step": 6985 }, { "epoch": 55.888, "grad_norm": 40.82088088989258, "learning_rate": 2.453777777777778e-05, "loss": 1.0794, "step": 6986 }, { "epoch": 55.896, 
"grad_norm": 36.00241470336914, "learning_rate": 2.4533333333333334e-05, "loss": 0.981, "step": 6987 }, { "epoch": 55.904, "grad_norm": 23.310239791870117, "learning_rate": 2.452888888888889e-05, "loss": 1.2919, "step": 6988 }, { "epoch": 55.912, "grad_norm": 38.34510040283203, "learning_rate": 2.4524444444444444e-05, "loss": 0.5921, "step": 6989 }, { "epoch": 55.92, "grad_norm": 24.755571365356445, "learning_rate": 2.4520000000000002e-05, "loss": 0.8026, "step": 6990 }, { "epoch": 55.928, "grad_norm": 21.478225708007812, "learning_rate": 2.4515555555555554e-05, "loss": 1.1978, "step": 6991 }, { "epoch": 55.936, "grad_norm": 21.84963035583496, "learning_rate": 2.4511111111111112e-05, "loss": 1.0956, "step": 6992 }, { "epoch": 55.944, "grad_norm": 19.780982971191406, "learning_rate": 2.4506666666666667e-05, "loss": 1.175, "step": 6993 }, { "epoch": 55.952, "grad_norm": 31.046918869018555, "learning_rate": 2.4502222222222225e-05, "loss": 0.884, "step": 6994 }, { "epoch": 55.96, "grad_norm": 26.702539443969727, "learning_rate": 2.4497777777777777e-05, "loss": 1.1982, "step": 6995 }, { "epoch": 55.968, "grad_norm": 28.776687622070312, "learning_rate": 2.4493333333333335e-05, "loss": 1.4384, "step": 6996 }, { "epoch": 55.976, "grad_norm": 33.04911422729492, "learning_rate": 2.448888888888889e-05, "loss": 1.04, "step": 6997 }, { "epoch": 55.984, "grad_norm": 16.56674575805664, "learning_rate": 2.4484444444444445e-05, "loss": 1.4596, "step": 6998 }, { "epoch": 55.992, "grad_norm": 25.005605697631836, "learning_rate": 2.448e-05, "loss": 0.933, "step": 6999 }, { "epoch": 56.0, "grad_norm": 23.326669692993164, "learning_rate": 2.4475555555555558e-05, "loss": 0.6819, "step": 7000 }, { "epoch": 56.0, "eval_loss": 1.051561713218689, "eval_map": 0.3943, "eval_map_50": 0.7317, "eval_map_75": 0.3754, "eval_map_Coverall": 0.6552, "eval_map_Face_Shield": 0.4872, "eval_map_Gloves": 0.273, "eval_map_Goggles": 0.1882, "eval_map_Mask": 0.3682, "eval_map_large": 0.6074, 
"eval_map_medium": 0.2489, "eval_map_small": 0.3765, "eval_mar_1": 0.3106, "eval_mar_10": 0.5485, "eval_mar_100": 0.5622, "eval_mar_100_Coverall": 0.7644, "eval_mar_100_Face_Shield": 0.7412, "eval_mar_100_Gloves": 0.4, "eval_mar_100_Goggles": 0.4344, "eval_mar_100_Mask": 0.4712, "eval_mar_large": 0.6975, "eval_mar_medium": 0.4136, "eval_mar_small": 0.431, "eval_runtime": 0.9369, "eval_samples_per_second": 30.952, "eval_steps_per_second": 2.135, "step": 7000 }, { "epoch": 56.008, "grad_norm": 26.79446792602539, "learning_rate": 2.4471111111111112e-05, "loss": 1.5047, "step": 7001 }, { "epoch": 56.016, "grad_norm": 21.591354370117188, "learning_rate": 2.4466666666666667e-05, "loss": 1.328, "step": 7002 }, { "epoch": 56.024, "grad_norm": 13.926490783691406, "learning_rate": 2.4462222222222222e-05, "loss": 0.995, "step": 7003 }, { "epoch": 56.032, "grad_norm": 25.18003273010254, "learning_rate": 2.445777777777778e-05, "loss": 0.8659, "step": 7004 }, { "epoch": 56.04, "grad_norm": 18.912853240966797, "learning_rate": 2.4453333333333335e-05, "loss": 1.0615, "step": 7005 }, { "epoch": 56.048, "grad_norm": 37.81708526611328, "learning_rate": 2.444888888888889e-05, "loss": 0.8465, "step": 7006 }, { "epoch": 56.056, "grad_norm": 25.11304473876953, "learning_rate": 2.4444444444444445e-05, "loss": 0.7821, "step": 7007 }, { "epoch": 56.064, "grad_norm": 15.05955696105957, "learning_rate": 2.4440000000000003e-05, "loss": 0.9419, "step": 7008 }, { "epoch": 56.072, "grad_norm": 24.7806339263916, "learning_rate": 2.4435555555555555e-05, "loss": 0.7525, "step": 7009 }, { "epoch": 56.08, "grad_norm": 21.703575134277344, "learning_rate": 2.4431111111111113e-05, "loss": 1.0288, "step": 7010 }, { "epoch": 56.088, "grad_norm": 25.41188621520996, "learning_rate": 2.4426666666666668e-05, "loss": 0.764, "step": 7011 }, { "epoch": 56.096, "grad_norm": 31.14933204650879, "learning_rate": 2.4422222222222223e-05, "loss": 0.7734, "step": 7012 }, { "epoch": 56.104, "grad_norm": 58.21792984008789, 
"learning_rate": 2.4417777777777777e-05, "loss": 0.885, "step": 7013 }, { "epoch": 56.112, "grad_norm": 15.665755271911621, "learning_rate": 2.4413333333333336e-05, "loss": 1.0377, "step": 7014 }, { "epoch": 56.12, "grad_norm": 22.226594924926758, "learning_rate": 2.440888888888889e-05, "loss": 1.9115, "step": 7015 }, { "epoch": 56.128, "grad_norm": 20.45534324645996, "learning_rate": 2.4404444444444445e-05, "loss": 1.3541, "step": 7016 }, { "epoch": 56.136, "grad_norm": 23.82978057861328, "learning_rate": 2.44e-05, "loss": 0.8397, "step": 7017 }, { "epoch": 56.144, "grad_norm": 17.63360595703125, "learning_rate": 2.439555555555556e-05, "loss": 1.0576, "step": 7018 }, { "epoch": 56.152, "grad_norm": 26.599092483520508, "learning_rate": 2.4391111111111113e-05, "loss": 0.7011, "step": 7019 }, { "epoch": 56.16, "grad_norm": 15.326096534729004, "learning_rate": 2.4386666666666668e-05, "loss": 0.6927, "step": 7020 }, { "epoch": 56.168, "grad_norm": 19.498733520507812, "learning_rate": 2.4382222222222223e-05, "loss": 1.2925, "step": 7021 }, { "epoch": 56.176, "grad_norm": 23.547588348388672, "learning_rate": 2.437777777777778e-05, "loss": 0.7124, "step": 7022 }, { "epoch": 56.184, "grad_norm": 20.574493408203125, "learning_rate": 2.4373333333333333e-05, "loss": 1.1069, "step": 7023 }, { "epoch": 56.192, "grad_norm": 19.39876937866211, "learning_rate": 2.436888888888889e-05, "loss": 1.0324, "step": 7024 }, { "epoch": 56.2, "grad_norm": 27.525909423828125, "learning_rate": 2.4364444444444446e-05, "loss": 1.1439, "step": 7025 }, { "epoch": 56.208, "grad_norm": 48.34454345703125, "learning_rate": 2.4360000000000004e-05, "loss": 0.7696, "step": 7026 }, { "epoch": 56.216, "grad_norm": 28.552186965942383, "learning_rate": 2.4355555555555555e-05, "loss": 0.9464, "step": 7027 }, { "epoch": 56.224, "grad_norm": 19.77334976196289, "learning_rate": 2.4351111111111114e-05, "loss": 0.8477, "step": 7028 }, { "epoch": 56.232, "grad_norm": 23.309993743896484, "learning_rate": 
2.434666666666667e-05, "loss": 0.8273, "step": 7029 }, { "epoch": 56.24, "grad_norm": 22.077682495117188, "learning_rate": 2.4342222222222223e-05, "loss": 0.8452, "step": 7030 }, { "epoch": 56.248, "grad_norm": 15.77646541595459, "learning_rate": 2.4337777777777778e-05, "loss": 1.2208, "step": 7031 }, { "epoch": 56.256, "grad_norm": 20.862064361572266, "learning_rate": 2.4333333333333336e-05, "loss": 1.0946, "step": 7032 }, { "epoch": 56.264, "grad_norm": 17.593982696533203, "learning_rate": 2.432888888888889e-05, "loss": 0.8725, "step": 7033 }, { "epoch": 56.272, "grad_norm": 31.84588623046875, "learning_rate": 2.4324444444444446e-05, "loss": 1.0831, "step": 7034 }, { "epoch": 56.28, "grad_norm": 39.487586975097656, "learning_rate": 2.432e-05, "loss": 1.5538, "step": 7035 }, { "epoch": 56.288, "grad_norm": 317.02777099609375, "learning_rate": 2.431555555555556e-05, "loss": 0.9809, "step": 7036 }, { "epoch": 56.296, "grad_norm": 41.6801643371582, "learning_rate": 2.431111111111111e-05, "loss": 0.8153, "step": 7037 }, { "epoch": 56.304, "grad_norm": 21.00836944580078, "learning_rate": 2.4306666666666665e-05, "loss": 0.973, "step": 7038 }, { "epoch": 56.312, "grad_norm": 17.866003036499023, "learning_rate": 2.4302222222222224e-05, "loss": 0.7909, "step": 7039 }, { "epoch": 56.32, "grad_norm": 29.774377822875977, "learning_rate": 2.429777777777778e-05, "loss": 1.0379, "step": 7040 }, { "epoch": 56.328, "grad_norm": 52.442893981933594, "learning_rate": 2.4293333333333333e-05, "loss": 2.3584, "step": 7041 }, { "epoch": 56.336, "grad_norm": 23.663997650146484, "learning_rate": 2.4288888888888888e-05, "loss": 0.8648, "step": 7042 }, { "epoch": 56.344, "grad_norm": 28.31568717956543, "learning_rate": 2.4284444444444446e-05, "loss": 1.0483, "step": 7043 }, { "epoch": 56.352, "grad_norm": 32.83280944824219, "learning_rate": 2.428e-05, "loss": 0.7197, "step": 7044 }, { "epoch": 56.36, "grad_norm": 16.357091903686523, "learning_rate": 2.4275555555555556e-05, "loss": 0.787, 
"step": 7045 }, { "epoch": 56.368, "grad_norm": 27.67878532409668, "learning_rate": 2.427111111111111e-05, "loss": 1.0058, "step": 7046 }, { "epoch": 56.376, "grad_norm": 51.61876678466797, "learning_rate": 2.426666666666667e-05, "loss": 0.944, "step": 7047 }, { "epoch": 56.384, "grad_norm": 40.21149444580078, "learning_rate": 2.426222222222222e-05, "loss": 1.3013, "step": 7048 }, { "epoch": 56.392, "grad_norm": 21.284404754638672, "learning_rate": 2.425777777777778e-05, "loss": 0.7377, "step": 7049 }, { "epoch": 56.4, "grad_norm": 87.77851104736328, "learning_rate": 2.4253333333333334e-05, "loss": 0.8737, "step": 7050 }, { "epoch": 56.408, "grad_norm": Infinity, "learning_rate": 2.4253333333333334e-05, "loss": 0.8887, "step": 7051 }, { "epoch": 56.416, "grad_norm": 33.64824676513672, "learning_rate": 2.4248888888888892e-05, "loss": 1.0011, "step": 7052 }, { "epoch": 56.424, "grad_norm": 23.920948028564453, "learning_rate": 2.4244444444444443e-05, "loss": 1.3409, "step": 7053 }, { "epoch": 56.432, "grad_norm": 45.391998291015625, "learning_rate": 2.4240000000000002e-05, "loss": 1.2165, "step": 7054 }, { "epoch": 56.44, "grad_norm": 41.416709899902344, "learning_rate": 2.4235555555555557e-05, "loss": 0.7061, "step": 7055 }, { "epoch": 56.448, "grad_norm": 23.132488250732422, "learning_rate": 2.423111111111111e-05, "loss": 1.0221, "step": 7056 }, { "epoch": 56.456, "grad_norm": 55.8602409362793, "learning_rate": 2.4226666666666666e-05, "loss": 0.9346, "step": 7057 }, { "epoch": 56.464, "grad_norm": 24.523130416870117, "learning_rate": 2.4222222222222224e-05, "loss": 0.874, "step": 7058 }, { "epoch": 56.472, "grad_norm": 25.61164665222168, "learning_rate": 2.421777777777778e-05, "loss": 0.7729, "step": 7059 }, { "epoch": 56.48, "grad_norm": 80.76461029052734, "learning_rate": 2.4213333333333334e-05, "loss": 1.0151, "step": 7060 }, { "epoch": 56.488, "grad_norm": 26.130943298339844, "learning_rate": 2.420888888888889e-05, "loss": 0.7426, "step": 7061 }, { "epoch": 
56.496, "grad_norm": 23.13547134399414, "learning_rate": 2.4204444444444447e-05, "loss": 1.027, "step": 7062 }, { "epoch": 56.504, "grad_norm": 33.04946517944336, "learning_rate": 2.4200000000000002e-05, "loss": 0.8905, "step": 7063 }, { "epoch": 56.512, "grad_norm": 49.23081588745117, "learning_rate": 2.4195555555555557e-05, "loss": 0.9612, "step": 7064 }, { "epoch": 56.52, "grad_norm": 37.573081970214844, "learning_rate": 2.4191111111111112e-05, "loss": 0.9156, "step": 7065 }, { "epoch": 56.528, "grad_norm": 24.367542266845703, "learning_rate": 2.418666666666667e-05, "loss": 1.3533, "step": 7066 }, { "epoch": 56.536, "grad_norm": 53.71742248535156, "learning_rate": 2.418222222222222e-05, "loss": 1.2556, "step": 7067 }, { "epoch": 56.544, "grad_norm": 20.601333618164062, "learning_rate": 2.417777777777778e-05, "loss": 0.9651, "step": 7068 }, { "epoch": 56.552, "grad_norm": 41.13465118408203, "learning_rate": 2.4173333333333335e-05, "loss": 1.7442, "step": 7069 }, { "epoch": 56.56, "grad_norm": 19.021739959716797, "learning_rate": 2.416888888888889e-05, "loss": 0.9487, "step": 7070 }, { "epoch": 56.568, "grad_norm": 164.8973388671875, "learning_rate": 2.4164444444444444e-05, "loss": 3.0039, "step": 7071 }, { "epoch": 56.576, "grad_norm": 17.302513122558594, "learning_rate": 2.4160000000000002e-05, "loss": 0.7153, "step": 7072 }, { "epoch": 56.584, "grad_norm": 20.463848114013672, "learning_rate": 2.4155555555555557e-05, "loss": 1.1651, "step": 7073 }, { "epoch": 56.592, "grad_norm": 14.420146942138672, "learning_rate": 2.4151111111111112e-05, "loss": 1.4458, "step": 7074 }, { "epoch": 56.6, "grad_norm": 21.267234802246094, "learning_rate": 2.4146666666666667e-05, "loss": 1.3892, "step": 7075 }, { "epoch": 56.608, "grad_norm": 27.069894790649414, "learning_rate": 2.4142222222222225e-05, "loss": 1.0042, "step": 7076 }, { "epoch": 56.616, "grad_norm": 29.728618621826172, "learning_rate": 2.413777777777778e-05, "loss": 0.8822, "step": 7077 }, { "epoch": 56.624, 
"grad_norm": 36.70062255859375, "learning_rate": 2.4133333333333335e-05, "loss": 1.2233, "step": 7078 }, { "epoch": 56.632, "grad_norm": 16.230688095092773, "learning_rate": 2.412888888888889e-05, "loss": 1.0701, "step": 7079 }, { "epoch": 56.64, "grad_norm": 21.458026885986328, "learning_rate": 2.4124444444444448e-05, "loss": 0.761, "step": 7080 }, { "epoch": 56.648, "grad_norm": 31.764522552490234, "learning_rate": 2.412e-05, "loss": 0.8574, "step": 7081 }, { "epoch": 56.656, "grad_norm": 36.02870559692383, "learning_rate": 2.4115555555555558e-05, "loss": 1.1459, "step": 7082 }, { "epoch": 56.664, "grad_norm": 22.285919189453125, "learning_rate": 2.4111111111111113e-05, "loss": 1.0712, "step": 7083 }, { "epoch": 56.672, "grad_norm": 18.865440368652344, "learning_rate": 2.4106666666666667e-05, "loss": 1.0736, "step": 7084 }, { "epoch": 56.68, "grad_norm": 89.9365005493164, "learning_rate": 2.4102222222222222e-05, "loss": 0.9167, "step": 7085 }, { "epoch": 56.688, "grad_norm": 37.973365783691406, "learning_rate": 2.409777777777778e-05, "loss": 0.7799, "step": 7086 }, { "epoch": 56.696, "grad_norm": 40.870121002197266, "learning_rate": 2.4093333333333335e-05, "loss": 0.9856, "step": 7087 }, { "epoch": 56.704, "grad_norm": 23.1013126373291, "learning_rate": 2.408888888888889e-05, "loss": 0.9169, "step": 7088 }, { "epoch": 56.712, "grad_norm": 24.773395538330078, "learning_rate": 2.4084444444444445e-05, "loss": 1.1051, "step": 7089 }, { "epoch": 56.72, "grad_norm": 26.081876754760742, "learning_rate": 2.408e-05, "loss": 0.7324, "step": 7090 }, { "epoch": 56.728, "grad_norm": 39.58948516845703, "learning_rate": 2.4075555555555558e-05, "loss": 0.7206, "step": 7091 }, { "epoch": 56.736, "grad_norm": 15.035881042480469, "learning_rate": 2.407111111111111e-05, "loss": 1.0417, "step": 7092 }, { "epoch": 56.744, "grad_norm": 19.367334365844727, "learning_rate": 2.4066666666666668e-05, "loss": 1.0801, "step": 7093 }, { "epoch": 56.752, "grad_norm": 35.29899215698242, 
"learning_rate": 2.4062222222222223e-05, "loss": 1.9757, "step": 7094 }, { "epoch": 56.76, "grad_norm": 20.162893295288086, "learning_rate": 2.4057777777777777e-05, "loss": 1.1758, "step": 7095 }, { "epoch": 56.768, "grad_norm": 23.81424331665039, "learning_rate": 2.4053333333333332e-05, "loss": 1.0651, "step": 7096 }, { "epoch": 56.776, "grad_norm": 17.694202423095703, "learning_rate": 2.404888888888889e-05, "loss": 1.1086, "step": 7097 }, { "epoch": 56.784, "grad_norm": 29.05047607421875, "learning_rate": 2.4044444444444445e-05, "loss": 0.9783, "step": 7098 }, { "epoch": 56.792, "grad_norm": 25.399234771728516, "learning_rate": 2.404e-05, "loss": 0.7468, "step": 7099 }, { "epoch": 56.8, "grad_norm": 22.591190338134766, "learning_rate": 2.4035555555555555e-05, "loss": 0.8084, "step": 7100 }, { "epoch": 56.808, "grad_norm": 31.21339988708496, "learning_rate": 2.4031111111111113e-05, "loss": 0.8441, "step": 7101 }, { "epoch": 56.816, "grad_norm": 33.22804641723633, "learning_rate": 2.4026666666666668e-05, "loss": 1.2985, "step": 7102 }, { "epoch": 56.824, "grad_norm": 21.19317054748535, "learning_rate": 2.4022222222222223e-05, "loss": 0.9917, "step": 7103 }, { "epoch": 56.832, "grad_norm": 64.70601654052734, "learning_rate": 2.4017777777777778e-05, "loss": 0.8997, "step": 7104 }, { "epoch": 56.84, "grad_norm": 11.200053215026855, "learning_rate": 2.4013333333333336e-05, "loss": 0.723, "step": 7105 }, { "epoch": 56.848, "grad_norm": 18.692201614379883, "learning_rate": 2.4008888888888888e-05, "loss": 0.7836, "step": 7106 }, { "epoch": 56.856, "grad_norm": 19.054481506347656, "learning_rate": 2.4004444444444446e-05, "loss": 1.1196, "step": 7107 }, { "epoch": 56.864, "grad_norm": 33.251075744628906, "learning_rate": 2.4e-05, "loss": 1.0667, "step": 7108 }, { "epoch": 56.872, "grad_norm": 80.1279525756836, "learning_rate": 2.399555555555556e-05, "loss": 1.3637, "step": 7109 }, { "epoch": 56.88, "grad_norm": 28.52190589904785, "learning_rate": 2.399111111111111e-05, 
"loss": 0.8586, "step": 7110 }, { "epoch": 56.888, "grad_norm": 26.291940689086914, "learning_rate": 2.398666666666667e-05, "loss": 0.8141, "step": 7111 }, { "epoch": 56.896, "grad_norm": 29.10265350341797, "learning_rate": 2.3982222222222223e-05, "loss": 1.1441, "step": 7112 }, { "epoch": 56.904, "grad_norm": 28.181560516357422, "learning_rate": 2.3977777777777778e-05, "loss": 1.0456, "step": 7113 }, { "epoch": 56.912, "grad_norm": 20.918853759765625, "learning_rate": 2.3973333333333333e-05, "loss": 0.728, "step": 7114 }, { "epoch": 56.92, "grad_norm": 28.40667152404785, "learning_rate": 2.396888888888889e-05, "loss": 0.9681, "step": 7115 }, { "epoch": 56.928, "grad_norm": 33.67899703979492, "learning_rate": 2.3964444444444446e-05, "loss": 0.865, "step": 7116 }, { "epoch": 56.936, "grad_norm": 22.171438217163086, "learning_rate": 2.396e-05, "loss": 0.8956, "step": 7117 }, { "epoch": 56.944, "grad_norm": 27.803775787353516, "learning_rate": 2.3955555555555556e-05, "loss": 0.9185, "step": 7118 }, { "epoch": 56.952, "grad_norm": 31.835107803344727, "learning_rate": 2.3951111111111114e-05, "loss": 0.9613, "step": 7119 }, { "epoch": 56.96, "grad_norm": 22.702253341674805, "learning_rate": 2.394666666666667e-05, "loss": 0.7938, "step": 7120 }, { "epoch": 56.968, "grad_norm": 21.131729125976562, "learning_rate": 2.3942222222222224e-05, "loss": 1.1952, "step": 7121 }, { "epoch": 56.976, "grad_norm": 12.195202827453613, "learning_rate": 2.393777777777778e-05, "loss": 0.9326, "step": 7122 }, { "epoch": 56.984, "grad_norm": 44.32327651977539, "learning_rate": 2.3933333333333337e-05, "loss": 0.924, "step": 7123 }, { "epoch": 56.992, "grad_norm": 22.123533248901367, "learning_rate": 2.3928888888888888e-05, "loss": 1.148, "step": 7124 }, { "epoch": 57.0, "grad_norm": 32.615142822265625, "learning_rate": 2.3924444444444447e-05, "loss": 0.9345, "step": 7125 }, { "epoch": 57.0, "eval_loss": 1.0897289514541626, "eval_map": 0.4123, "eval_map_50": 0.7312, "eval_map_75": 0.3851, 
"eval_map_Coverall": 0.6635, "eval_map_Face_Shield": 0.4926, "eval_map_Gloves": 0.3378, "eval_map_Goggles": 0.1861, "eval_map_Mask": 0.3818, "eval_map_large": 0.6061, "eval_map_medium": 0.2588, "eval_map_small": 0.3185, "eval_mar_1": 0.3147, "eval_mar_10": 0.5308, "eval_mar_100": 0.5449, "eval_mar_100_Coverall": 0.7667, "eval_mar_100_Face_Shield": 0.6588, "eval_mar_100_Gloves": 0.423, "eval_mar_100_Goggles": 0.4031, "eval_mar_100_Mask": 0.4731, "eval_mar_large": 0.7023, "eval_mar_medium": 0.4015, "eval_mar_small": 0.3439, "eval_runtime": 0.9233, "eval_samples_per_second": 31.411, "eval_steps_per_second": 2.166, "step": 7125 }, { "epoch": 57.008, "grad_norm": 19.98782730102539, "learning_rate": 2.392e-05, "loss": 0.8575, "step": 7126 }, { "epoch": 57.016, "grad_norm": 26.530778884887695, "learning_rate": 2.3915555555555556e-05, "loss": 1.0445, "step": 7127 }, { "epoch": 57.024, "grad_norm": 15.289998054504395, "learning_rate": 2.391111111111111e-05, "loss": 0.9203, "step": 7128 }, { "epoch": 57.032, "grad_norm": 18.705610275268555, "learning_rate": 2.390666666666667e-05, "loss": 0.6336, "step": 7129 }, { "epoch": 57.04, "grad_norm": 43.212867736816406, "learning_rate": 2.3902222222222224e-05, "loss": 0.9915, "step": 7130 }, { "epoch": 57.048, "grad_norm": 19.180370330810547, "learning_rate": 2.389777777777778e-05, "loss": 0.9395, "step": 7131 }, { "epoch": 57.056, "grad_norm": 36.72553634643555, "learning_rate": 2.3893333333333334e-05, "loss": 1.2842, "step": 7132 }, { "epoch": 57.064, "grad_norm": 18.0058650970459, "learning_rate": 2.3888888888888892e-05, "loss": 1.0292, "step": 7133 }, { "epoch": 57.072, "grad_norm": 10.497113227844238, "learning_rate": 2.3884444444444447e-05, "loss": 0.8997, "step": 7134 }, { "epoch": 57.08, "grad_norm": 27.261499404907227, "learning_rate": 2.3880000000000002e-05, "loss": 0.9212, "step": 7135 }, { "epoch": 57.088, "grad_norm": 16.11408233642578, "learning_rate": 2.3875555555555557e-05, "loss": 0.8951, "step": 7136 }, { "epoch": 
57.096, "grad_norm": 22.85902976989746, "learning_rate": 2.3871111111111115e-05, "loss": 1.3357, "step": 7137 }, { "epoch": 57.104, "grad_norm": 33.63385009765625, "learning_rate": 2.3866666666666666e-05, "loss": 0.6389, "step": 7138 }, { "epoch": 57.112, "grad_norm": 13.657966613769531, "learning_rate": 2.3862222222222225e-05, "loss": 1.0959, "step": 7139 }, { "epoch": 57.12, "grad_norm": 27.470918655395508, "learning_rate": 2.385777777777778e-05, "loss": 0.5433, "step": 7140 }, { "epoch": 57.128, "grad_norm": 30.524219512939453, "learning_rate": 2.3853333333333334e-05, "loss": 0.9021, "step": 7141 }, { "epoch": 57.136, "grad_norm": 37.92475128173828, "learning_rate": 2.384888888888889e-05, "loss": 2.534, "step": 7142 }, { "epoch": 57.144, "grad_norm": 36.966094970703125, "learning_rate": 2.3844444444444444e-05, "loss": 0.8685, "step": 7143 }, { "epoch": 57.152, "grad_norm": 12.965096473693848, "learning_rate": 2.3840000000000002e-05, "loss": 1.1662, "step": 7144 }, { "epoch": 57.16, "grad_norm": 24.17544174194336, "learning_rate": 2.3835555555555557e-05, "loss": 0.9063, "step": 7145 }, { "epoch": 57.168, "grad_norm": 31.46710968017578, "learning_rate": 2.3831111111111112e-05, "loss": 2.0507, "step": 7146 }, { "epoch": 57.176, "grad_norm": 26.149494171142578, "learning_rate": 2.3826666666666667e-05, "loss": 0.7472, "step": 7147 }, { "epoch": 57.184, "grad_norm": 38.51366424560547, "learning_rate": 2.3822222222222225e-05, "loss": 1.5788, "step": 7148 }, { "epoch": 57.192, "grad_norm": 24.918617248535156, "learning_rate": 2.3817777777777776e-05, "loss": 0.7805, "step": 7149 }, { "epoch": 57.2, "grad_norm": 19.62616539001465, "learning_rate": 2.3813333333333335e-05, "loss": 1.2732, "step": 7150 }, { "epoch": 57.208, "grad_norm": 17.48796844482422, "learning_rate": 2.380888888888889e-05, "loss": 0.931, "step": 7151 }, { "epoch": 57.216, "grad_norm": 17.898710250854492, "learning_rate": 2.3804444444444444e-05, "loss": 1.0547, "step": 7152 }, { "epoch": 57.224, 
"grad_norm": 18.14034652709961, "learning_rate": 2.38e-05, "loss": 1.2305, "step": 7153 }, { "epoch": 57.232, "grad_norm": 25.250276565551758, "learning_rate": 2.3795555555555557e-05, "loss": 1.1551, "step": 7154 }, { "epoch": 57.24, "grad_norm": 21.05245018005371, "learning_rate": 2.3791111111111112e-05, "loss": 0.81, "step": 7155 }, { "epoch": 57.248, "grad_norm": 16.988937377929688, "learning_rate": 2.3786666666666667e-05, "loss": 1.1745, "step": 7156 }, { "epoch": 57.256, "grad_norm": 25.823022842407227, "learning_rate": 2.3782222222222222e-05, "loss": 1.3442, "step": 7157 }, { "epoch": 57.264, "grad_norm": 15.809890747070312, "learning_rate": 2.377777777777778e-05, "loss": 0.595, "step": 7158 }, { "epoch": 57.272, "grad_norm": 18.30784797668457, "learning_rate": 2.3773333333333335e-05, "loss": 0.6375, "step": 7159 }, { "epoch": 57.28, "grad_norm": 19.06962776184082, "learning_rate": 2.376888888888889e-05, "loss": 1.175, "step": 7160 }, { "epoch": 57.288, "grad_norm": 33.88539505004883, "learning_rate": 2.3764444444444445e-05, "loss": 0.6, "step": 7161 }, { "epoch": 57.296, "grad_norm": 15.90397834777832, "learning_rate": 2.3760000000000003e-05, "loss": 1.2781, "step": 7162 }, { "epoch": 57.304, "grad_norm": 28.662212371826172, "learning_rate": 2.3755555555555554e-05, "loss": 1.0765, "step": 7163 }, { "epoch": 57.312, "grad_norm": 23.593505859375, "learning_rate": 2.3751111111111113e-05, "loss": 1.2277, "step": 7164 }, { "epoch": 57.32, "grad_norm": 17.461074829101562, "learning_rate": 2.3746666666666667e-05, "loss": 0.7332, "step": 7165 }, { "epoch": 57.328, "grad_norm": 50.08491134643555, "learning_rate": 2.3742222222222226e-05, "loss": 1.2308, "step": 7166 }, { "epoch": 57.336, "grad_norm": 29.745630264282227, "learning_rate": 2.3737777777777777e-05, "loss": 1.144, "step": 7167 }, { "epoch": 57.344, "grad_norm": 23.682424545288086, "learning_rate": 2.3733333333333335e-05, "loss": 0.8713, "step": 7168 }, { "epoch": 57.352, "grad_norm": 29.556108474731445, 
"learning_rate": 2.372888888888889e-05, "loss": 0.9355, "step": 7169 }, { "epoch": 57.36, "grad_norm": 21.755590438842773, "learning_rate": 2.3724444444444445e-05, "loss": 1.2731, "step": 7170 }, { "epoch": 57.368, "grad_norm": 29.177915573120117, "learning_rate": 2.372e-05, "loss": 1.0747, "step": 7171 }, { "epoch": 57.376, "grad_norm": 22.73201560974121, "learning_rate": 2.3715555555555558e-05, "loss": 1.0458, "step": 7172 }, { "epoch": 57.384, "grad_norm": 32.6108512878418, "learning_rate": 2.3711111111111113e-05, "loss": 0.7228, "step": 7173 }, { "epoch": 57.392, "grad_norm": 19.643503189086914, "learning_rate": 2.3706666666666668e-05, "loss": 1.3623, "step": 7174 }, { "epoch": 57.4, "grad_norm": 26.532875061035156, "learning_rate": 2.3702222222222223e-05, "loss": 0.6849, "step": 7175 }, { "epoch": 57.408, "grad_norm": 33.2724609375, "learning_rate": 2.369777777777778e-05, "loss": 1.3296, "step": 7176 }, { "epoch": 57.416, "grad_norm": 10.02466106414795, "learning_rate": 2.3693333333333332e-05, "loss": 0.7496, "step": 7177 }, { "epoch": 57.424, "grad_norm": 28.730125427246094, "learning_rate": 2.368888888888889e-05, "loss": 1.0229, "step": 7178 }, { "epoch": 57.432, "grad_norm": 34.84096145629883, "learning_rate": 2.3684444444444445e-05, "loss": 0.7952, "step": 7179 }, { "epoch": 57.44, "grad_norm": 81.27981567382812, "learning_rate": 2.3680000000000004e-05, "loss": 0.9853, "step": 7180 }, { "epoch": 57.448, "grad_norm": 35.26433563232422, "learning_rate": 2.3675555555555555e-05, "loss": 2.1369, "step": 7181 }, { "epoch": 57.456, "grad_norm": 175.7296905517578, "learning_rate": 2.3671111111111113e-05, "loss": 0.8071, "step": 7182 }, { "epoch": 57.464, "grad_norm": 54.587440490722656, "learning_rate": 2.3666666666666668e-05, "loss": 0.6851, "step": 7183 }, { "epoch": 57.472, "grad_norm": 17.993192672729492, "learning_rate": 2.3662222222222223e-05, "loss": 1.0759, "step": 7184 }, { "epoch": 57.48, "grad_norm": 36.508113861083984, "learning_rate": 
2.3657777777777778e-05, "loss": 0.8935, "step": 7185 }, { "epoch": 57.488, "grad_norm": 17.302297592163086, "learning_rate": 2.3653333333333336e-05, "loss": 0.8131, "step": 7186 }, { "epoch": 57.496, "grad_norm": 211.60342407226562, "learning_rate": 2.364888888888889e-05, "loss": 0.8882, "step": 7187 }, { "epoch": 57.504, "grad_norm": 31.07349395751953, "learning_rate": 2.3644444444444446e-05, "loss": 0.8158, "step": 7188 }, { "epoch": 57.512, "grad_norm": 24.99361228942871, "learning_rate": 2.364e-05, "loss": 0.9395, "step": 7189 }, { "epoch": 57.52, "grad_norm": 49.10519027709961, "learning_rate": 2.363555555555556e-05, "loss": 0.9751, "step": 7190 }, { "epoch": 57.528, "grad_norm": 29.45414161682129, "learning_rate": 2.3631111111111114e-05, "loss": 0.9889, "step": 7191 }, { "epoch": 57.536, "grad_norm": 41.13189697265625, "learning_rate": 2.362666666666667e-05, "loss": 2.9713, "step": 7192 }, { "epoch": 57.544, "grad_norm": 30.898239135742188, "learning_rate": 2.3622222222222223e-05, "loss": 0.8364, "step": 7193 }, { "epoch": 57.552, "grad_norm": 38.036197662353516, "learning_rate": 2.3617777777777778e-05, "loss": 0.6993, "step": 7194 }, { "epoch": 57.56, "grad_norm": 43.71825408935547, "learning_rate": 2.3613333333333333e-05, "loss": 1.0249, "step": 7195 }, { "epoch": 57.568, "grad_norm": 31.63106918334961, "learning_rate": 2.3608888888888888e-05, "loss": 0.8984, "step": 7196 }, { "epoch": 57.576, "grad_norm": 21.7728214263916, "learning_rate": 2.3604444444444446e-05, "loss": 0.8845, "step": 7197 }, { "epoch": 57.584, "grad_norm": 25.582706451416016, "learning_rate": 2.36e-05, "loss": 0.8352, "step": 7198 }, { "epoch": 57.592, "grad_norm": 15.273324012756348, "learning_rate": 2.3595555555555556e-05, "loss": 1.0248, "step": 7199 }, { "epoch": 57.6, "grad_norm": 11.870538711547852, "learning_rate": 2.359111111111111e-05, "loss": 0.8925, "step": 7200 }, { "epoch": 57.608, "grad_norm": 19.94508171081543, "learning_rate": 2.358666666666667e-05, "loss": 1.2473, 
"step": 7201 }, { "epoch": 57.616, "grad_norm": 33.77299880981445, "learning_rate": 2.3582222222222224e-05, "loss": 1.1258, "step": 7202 }, { "epoch": 57.624, "grad_norm": 21.834936141967773, "learning_rate": 2.357777777777778e-05, "loss": 0.823, "step": 7203 }, { "epoch": 57.632, "grad_norm": 15.914432525634766, "learning_rate": 2.3573333333333334e-05, "loss": 0.9883, "step": 7204 }, { "epoch": 57.64, "grad_norm": 31.151931762695312, "learning_rate": 2.3568888888888892e-05, "loss": 1.0821, "step": 7205 }, { "epoch": 57.648, "grad_norm": 19.33808135986328, "learning_rate": 2.3564444444444443e-05, "loss": 1.1929, "step": 7206 }, { "epoch": 57.656, "grad_norm": 21.486257553100586, "learning_rate": 2.356e-05, "loss": 0.5913, "step": 7207 }, { "epoch": 57.664, "grad_norm": 52.85403823852539, "learning_rate": 2.3555555555555556e-05, "loss": 1.0482, "step": 7208 }, { "epoch": 57.672, "grad_norm": 21.190488815307617, "learning_rate": 2.355111111111111e-05, "loss": 0.7452, "step": 7209 }, { "epoch": 57.68, "grad_norm": 17.968603134155273, "learning_rate": 2.3546666666666666e-05, "loss": 1.0507, "step": 7210 }, { "epoch": 57.688, "grad_norm": 36.69367980957031, "learning_rate": 2.3542222222222224e-05, "loss": 0.8539, "step": 7211 }, { "epoch": 57.696, "grad_norm": 66.29915618896484, "learning_rate": 2.353777777777778e-05, "loss": 0.9198, "step": 7212 }, { "epoch": 57.704, "grad_norm": 18.991455078125, "learning_rate": 2.3533333333333334e-05, "loss": 1.1115, "step": 7213 }, { "epoch": 57.712, "grad_norm": 26.577621459960938, "learning_rate": 2.352888888888889e-05, "loss": 0.9461, "step": 7214 }, { "epoch": 57.72, "grad_norm": 23.302837371826172, "learning_rate": 2.3524444444444447e-05, "loss": 1.408, "step": 7215 }, { "epoch": 57.728, "grad_norm": 22.242612838745117, "learning_rate": 2.3520000000000002e-05, "loss": 1.238, "step": 7216 }, { "epoch": 57.736, "grad_norm": 27.777246475219727, "learning_rate": 2.3515555555555557e-05, "loss": 1.0865, "step": 7217 }, { "epoch": 
57.744, "grad_norm": 27.858444213867188, "learning_rate": 2.351111111111111e-05, "loss": 1.3097, "step": 7218 }, { "epoch": 57.752, "grad_norm": 20.225576400756836, "learning_rate": 2.350666666666667e-05, "loss": 0.5968, "step": 7219 }, { "epoch": 57.76, "grad_norm": 18.07209587097168, "learning_rate": 2.350222222222222e-05, "loss": 0.8232, "step": 7220 }, { "epoch": 57.768, "grad_norm": 20.806255340576172, "learning_rate": 2.349777777777778e-05, "loss": 0.8791, "step": 7221 }, { "epoch": 57.776, "grad_norm": 40.762046813964844, "learning_rate": 2.3493333333333334e-05, "loss": 0.9022, "step": 7222 }, { "epoch": 57.784, "grad_norm": 19.927478790283203, "learning_rate": 2.3488888888888893e-05, "loss": 1.1266, "step": 7223 }, { "epoch": 57.792, "grad_norm": 26.41970443725586, "learning_rate": 2.3484444444444444e-05, "loss": 0.9203, "step": 7224 }, { "epoch": 57.8, "grad_norm": 32.092227935791016, "learning_rate": 2.3480000000000002e-05, "loss": 1.3478, "step": 7225 }, { "epoch": 57.808, "grad_norm": 25.385284423828125, "learning_rate": 2.3475555555555557e-05, "loss": 1.2686, "step": 7226 }, { "epoch": 57.816, "grad_norm": 30.94202423095703, "learning_rate": 2.3471111111111112e-05, "loss": 0.957, "step": 7227 }, { "epoch": 57.824, "grad_norm": 18.308212280273438, "learning_rate": 2.3466666666666667e-05, "loss": 1.045, "step": 7228 }, { "epoch": 57.832, "grad_norm": 33.73159408569336, "learning_rate": 2.3462222222222225e-05, "loss": 0.8051, "step": 7229 }, { "epoch": 57.84, "grad_norm": 33.687320709228516, "learning_rate": 2.345777777777778e-05, "loss": 0.8409, "step": 7230 }, { "epoch": 57.848, "grad_norm": 19.93294334411621, "learning_rate": 2.3453333333333335e-05, "loss": 1.0513, "step": 7231 }, { "epoch": 57.856, "grad_norm": 30.302892684936523, "learning_rate": 2.344888888888889e-05, "loss": 0.801, "step": 7232 }, { "epoch": 57.864, "grad_norm": 26.109073638916016, "learning_rate": 2.3444444444444448e-05, "loss": 1.4734, "step": 7233 }, { "epoch": 57.872, 
"grad_norm": 22.958940505981445, "learning_rate": 2.344e-05, "loss": 1.4478, "step": 7234 }, { "epoch": 57.88, "grad_norm": 27.09451675415039, "learning_rate": 2.3435555555555557e-05, "loss": 1.1809, "step": 7235 }, { "epoch": 57.888, "grad_norm": 13.078006744384766, "learning_rate": 2.3431111111111112e-05, "loss": 0.8185, "step": 7236 }, { "epoch": 57.896, "grad_norm": 30.977642059326172, "learning_rate": 2.342666666666667e-05, "loss": 0.8258, "step": 7237 }, { "epoch": 57.904, "grad_norm": 30.8599853515625, "learning_rate": 2.3422222222222222e-05, "loss": 1.1007, "step": 7238 }, { "epoch": 57.912, "grad_norm": 29.383935928344727, "learning_rate": 2.341777777777778e-05, "loss": 0.7111, "step": 7239 }, { "epoch": 57.92, "grad_norm": 26.7969970703125, "learning_rate": 2.3413333333333335e-05, "loss": 0.8784, "step": 7240 }, { "epoch": 57.928, "grad_norm": 24.272750854492188, "learning_rate": 2.340888888888889e-05, "loss": 0.8574, "step": 7241 }, { "epoch": 57.936, "grad_norm": 25.82525634765625, "learning_rate": 2.3404444444444445e-05, "loss": 0.7825, "step": 7242 }, { "epoch": 57.944, "grad_norm": 22.167211532592773, "learning_rate": 2.3400000000000003e-05, "loss": 1.0685, "step": 7243 }, { "epoch": 57.952, "grad_norm": 18.979175567626953, "learning_rate": 2.3395555555555558e-05, "loss": 0.7984, "step": 7244 }, { "epoch": 57.96, "grad_norm": 31.223413467407227, "learning_rate": 2.339111111111111e-05, "loss": 0.8097, "step": 7245 }, { "epoch": 57.968, "grad_norm": 23.329557418823242, "learning_rate": 2.3386666666666668e-05, "loss": 1.1017, "step": 7246 }, { "epoch": 57.976, "grad_norm": 24.39718246459961, "learning_rate": 2.3382222222222222e-05, "loss": 1.0582, "step": 7247 }, { "epoch": 57.984, "grad_norm": 26.084823608398438, "learning_rate": 2.337777777777778e-05, "loss": 0.9934, "step": 7248 }, { "epoch": 57.992, "grad_norm": 25.395233154296875, "learning_rate": 2.3373333333333332e-05, "loss": 0.8164, "step": 7249 }, { "epoch": 58.0, "grad_norm": 
18.30264663696289, "learning_rate": 2.336888888888889e-05, "loss": 0.6509, "step": 7250 }, { "epoch": 58.0, "eval_loss": 1.081503987312317, "eval_map": 0.4029, "eval_map_50": 0.7284, "eval_map_75": 0.3746, "eval_map_Coverall": 0.6457, "eval_map_Face_Shield": 0.4403, "eval_map_Gloves": 0.3425, "eval_map_Goggles": 0.1991, "eval_map_Mask": 0.3871, "eval_map_large": 0.5964, "eval_map_medium": 0.2637, "eval_map_small": 0.3061, "eval_mar_1": 0.3177, "eval_mar_10": 0.5379, "eval_mar_100": 0.5459, "eval_mar_100_Coverall": 0.7467, "eval_mar_100_Face_Shield": 0.6235, "eval_mar_100_Gloves": 0.441, "eval_mar_100_Goggles": 0.4375, "eval_mar_100_Mask": 0.4808, "eval_mar_large": 0.6636, "eval_mar_medium": 0.4108, "eval_mar_small": 0.354, "eval_runtime": 1.0287, "eval_samples_per_second": 28.191, "eval_steps_per_second": 1.944, "step": 7250 }, { "epoch": 58.008, "grad_norm": 30.881704330444336, "learning_rate": 2.3364444444444445e-05, "loss": 0.7911, "step": 7251 }, { "epoch": 58.016, "grad_norm": 251.79281616210938, "learning_rate": 2.336e-05, "loss": 0.7401, "step": 7252 }, { "epoch": 58.024, "grad_norm": 100.13894653320312, "learning_rate": 2.3355555555555555e-05, "loss": 0.9566, "step": 7253 }, { "epoch": 58.032, "grad_norm": 26.101016998291016, "learning_rate": 2.3351111111111113e-05, "loss": 0.9259, "step": 7254 }, { "epoch": 58.04, "grad_norm": 28.915706634521484, "learning_rate": 2.3346666666666668e-05, "loss": 0.8107, "step": 7255 }, { "epoch": 58.048, "grad_norm": 28.11680030822754, "learning_rate": 2.3342222222222223e-05, "loss": 1.0759, "step": 7256 }, { "epoch": 58.056, "grad_norm": 24.699331283569336, "learning_rate": 2.3337777777777778e-05, "loss": 0.9538, "step": 7257 }, { "epoch": 58.064, "grad_norm": 25.71484375, "learning_rate": 2.3333333333333336e-05, "loss": 0.7196, "step": 7258 }, { "epoch": 58.072, "grad_norm": 139.5827178955078, "learning_rate": 2.332888888888889e-05, "loss": 3.0889, "step": 7259 }, { "epoch": 58.08, "grad_norm": 25.709138870239258, 
"learning_rate": 2.3324444444444445e-05, "loss": 1.1388, "step": 7260 }, { "epoch": 58.088, "grad_norm": 25.01253318786621, "learning_rate": 2.332e-05, "loss": 1.2874, "step": 7261 }, { "epoch": 58.096, "grad_norm": 14.692121505737305, "learning_rate": 2.331555555555556e-05, "loss": 0.8271, "step": 7262 }, { "epoch": 58.104, "grad_norm": 52.002105712890625, "learning_rate": 2.331111111111111e-05, "loss": 1.3213, "step": 7263 }, { "epoch": 58.112, "grad_norm": 29.370925903320312, "learning_rate": 2.3306666666666668e-05, "loss": 0.8408, "step": 7264 }, { "epoch": 58.12, "grad_norm": 18.690717697143555, "learning_rate": 2.3302222222222223e-05, "loss": 0.651, "step": 7265 }, { "epoch": 58.128, "grad_norm": 20.25519561767578, "learning_rate": 2.3297777777777778e-05, "loss": 0.9427, "step": 7266 }, { "epoch": 58.136, "grad_norm": 36.20740509033203, "learning_rate": 2.3293333333333333e-05, "loss": 1.3617, "step": 7267 }, { "epoch": 58.144, "grad_norm": 21.043087005615234, "learning_rate": 2.328888888888889e-05, "loss": 0.7747, "step": 7268 }, { "epoch": 58.152, "grad_norm": 35.86779022216797, "learning_rate": 2.3284444444444446e-05, "loss": 0.8317, "step": 7269 }, { "epoch": 58.16, "grad_norm": 30.780946731567383, "learning_rate": 2.328e-05, "loss": 0.9575, "step": 7270 }, { "epoch": 58.168, "grad_norm": 39.00396728515625, "learning_rate": 2.3275555555555556e-05, "loss": 0.7549, "step": 7271 }, { "epoch": 58.176, "grad_norm": 80.9348373413086, "learning_rate": 2.3271111111111114e-05, "loss": 0.8354, "step": 7272 }, { "epoch": 58.184, "grad_norm": 23.48013687133789, "learning_rate": 2.326666666666667e-05, "loss": 1.0715, "step": 7273 }, { "epoch": 58.192, "grad_norm": 31.369182586669922, "learning_rate": 2.3262222222222223e-05, "loss": 1.4129, "step": 7274 }, { "epoch": 58.2, "grad_norm": 34.01041793823242, "learning_rate": 2.325777777777778e-05, "loss": 0.6943, "step": 7275 }, { "epoch": 58.208, "grad_norm": 36.041629791259766, "learning_rate": 2.3253333333333337e-05, 
"loss": 0.6092, "step": 7276 }, { "epoch": 58.216, "grad_norm": 18.16661262512207, "learning_rate": 2.3248888888888888e-05, "loss": 1.0224, "step": 7277 }, { "epoch": 58.224, "grad_norm": 16.05752944946289, "learning_rate": 2.3244444444444446e-05, "loss": 1.0153, "step": 7278 }, { "epoch": 58.232, "grad_norm": 21.99092674255371, "learning_rate": 2.324e-05, "loss": 0.9689, "step": 7279 }, { "epoch": 58.24, "grad_norm": 12.773061752319336, "learning_rate": 2.3235555555555556e-05, "loss": 0.8159, "step": 7280 }, { "epoch": 58.248, "grad_norm": 27.465595245361328, "learning_rate": 2.323111111111111e-05, "loss": 1.2643, "step": 7281 }, { "epoch": 58.256, "grad_norm": 23.290678024291992, "learning_rate": 2.322666666666667e-05, "loss": 1.0182, "step": 7282 }, { "epoch": 58.264, "grad_norm": 46.165550231933594, "learning_rate": 2.3222222222222224e-05, "loss": 0.9565, "step": 7283 }, { "epoch": 58.272, "grad_norm": 14.094633102416992, "learning_rate": 2.321777777777778e-05, "loss": 1.1356, "step": 7284 }, { "epoch": 58.28, "grad_norm": 31.08489227294922, "learning_rate": 2.3213333333333334e-05, "loss": 1.1732, "step": 7285 }, { "epoch": 58.288, "grad_norm": 182.63327026367188, "learning_rate": 2.3208888888888892e-05, "loss": 1.1766, "step": 7286 }, { "epoch": 58.296, "grad_norm": 39.49159622192383, "learning_rate": 2.3204444444444447e-05, "loss": 1.2077, "step": 7287 }, { "epoch": 58.304, "grad_norm": 32.89681625366211, "learning_rate": 2.32e-05, "loss": 0.7638, "step": 7288 }, { "epoch": 58.312, "grad_norm": 22.355873107910156, "learning_rate": 2.3195555555555556e-05, "loss": 1.0246, "step": 7289 }, { "epoch": 58.32, "grad_norm": 43.0556640625, "learning_rate": 2.3191111111111115e-05, "loss": 0.894, "step": 7290 }, { "epoch": 58.328, "grad_norm": 26.47139549255371, "learning_rate": 2.3186666666666666e-05, "loss": 0.6657, "step": 7291 }, { "epoch": 58.336, "grad_norm": 31.45051383972168, "learning_rate": 2.3182222222222224e-05, "loss": 1.1951, "step": 7292 }, { "epoch": 
58.344, "grad_norm": 19.88701820373535, "learning_rate": 2.317777777777778e-05, "loss": 1.1585, "step": 7293 }, { "epoch": 58.352, "grad_norm": 18.844377517700195, "learning_rate": 2.3173333333333337e-05, "loss": 1.0045, "step": 7294 }, { "epoch": 58.36, "grad_norm": 372.691650390625, "learning_rate": 2.316888888888889e-05, "loss": 1.0003, "step": 7295 }, { "epoch": 58.368, "grad_norm": 21.030742645263672, "learning_rate": 2.3164444444444444e-05, "loss": 1.4075, "step": 7296 }, { "epoch": 58.376, "grad_norm": 23.057044982910156, "learning_rate": 2.3160000000000002e-05, "loss": 0.8962, "step": 7297 }, { "epoch": 58.384, "grad_norm": 29.45281982421875, "learning_rate": 2.3155555555555557e-05, "loss": 0.9945, "step": 7298 }, { "epoch": 58.392, "grad_norm": 31.493934631347656, "learning_rate": 2.315111111111111e-05, "loss": 0.8517, "step": 7299 }, { "epoch": 58.4, "grad_norm": 29.855026245117188, "learning_rate": 2.3146666666666666e-05, "loss": 0.8738, "step": 7300 }, { "epoch": 58.408, "grad_norm": 23.313688278198242, "learning_rate": 2.3142222222222225e-05, "loss": 1.2235, "step": 7301 }, { "epoch": 58.416, "grad_norm": 15.455368995666504, "learning_rate": 2.3137777777777776e-05, "loss": 0.6929, "step": 7302 }, { "epoch": 58.424, "grad_norm": 17.279273986816406, "learning_rate": 2.3133333333333334e-05, "loss": 0.8316, "step": 7303 }, { "epoch": 58.432, "grad_norm": 30.925983428955078, "learning_rate": 2.312888888888889e-05, "loss": 0.5755, "step": 7304 }, { "epoch": 58.44, "grad_norm": 164.30442810058594, "learning_rate": 2.3124444444444447e-05, "loss": 1.2844, "step": 7305 }, { "epoch": 58.448, "grad_norm": 19.95555305480957, "learning_rate": 2.312e-05, "loss": 1.1106, "step": 7306 }, { "epoch": 58.456, "grad_norm": 24.001691818237305, "learning_rate": 2.3115555555555557e-05, "loss": 1.6132, "step": 7307 }, { "epoch": 58.464, "grad_norm": 23.689733505249023, "learning_rate": 2.3111111111111112e-05, "loss": 0.9496, "step": 7308 }, { "epoch": 58.472, "grad_norm": 
25.76511001586914, "learning_rate": 2.3106666666666667e-05, "loss": 0.958, "step": 7309 }, { "epoch": 58.48, "grad_norm": 23.181550979614258, "learning_rate": 2.310222222222222e-05, "loss": 0.9427, "step": 7310 }, { "epoch": 58.488, "grad_norm": 37.94765853881836, "learning_rate": 2.309777777777778e-05, "loss": 0.7294, "step": 7311 }, { "epoch": 58.496, "grad_norm": 48.73480987548828, "learning_rate": 2.3093333333333335e-05, "loss": 0.9706, "step": 7312 }, { "epoch": 58.504, "grad_norm": 30.192413330078125, "learning_rate": 2.308888888888889e-05, "loss": 1.2417, "step": 7313 }, { "epoch": 58.512, "grad_norm": 20.710487365722656, "learning_rate": 2.3084444444444444e-05, "loss": 0.8962, "step": 7314 }, { "epoch": 58.52, "grad_norm": 18.328929901123047, "learning_rate": 2.3080000000000003e-05, "loss": 1.4262, "step": 7315 }, { "epoch": 58.528, "grad_norm": 24.087230682373047, "learning_rate": 2.3075555555555557e-05, "loss": 1.0349, "step": 7316 }, { "epoch": 58.536, "grad_norm": 51.3505859375, "learning_rate": 2.3071111111111112e-05, "loss": 1.0801, "step": 7317 }, { "epoch": 58.544, "grad_norm": 33.8587646484375, "learning_rate": 2.3066666666666667e-05, "loss": 1.2095, "step": 7318 }, { "epoch": 58.552, "grad_norm": 20.222368240356445, "learning_rate": 2.3062222222222225e-05, "loss": 0.8743, "step": 7319 }, { "epoch": 58.56, "grad_norm": 186.5205078125, "learning_rate": 2.3057777777777777e-05, "loss": 0.871, "step": 7320 }, { "epoch": 58.568, "grad_norm": 66.3591537475586, "learning_rate": 2.3053333333333335e-05, "loss": 0.9787, "step": 7321 }, { "epoch": 58.576, "grad_norm": 21.18622589111328, "learning_rate": 2.304888888888889e-05, "loss": 0.7702, "step": 7322 }, { "epoch": 58.584, "grad_norm": 69.81478881835938, "learning_rate": 2.3044444444444445e-05, "loss": 0.8018, "step": 7323 }, { "epoch": 58.592, "grad_norm": 42.74208068847656, "learning_rate": 2.304e-05, "loss": 0.965, "step": 7324 }, { "epoch": 58.6, "grad_norm": 219.00218200683594, "learning_rate": 
2.3035555555555558e-05, "loss": 0.8063, "step": 7325 }, { "epoch": 58.608, "grad_norm": 45.81867980957031, "learning_rate": 2.3031111111111113e-05, "loss": 1.2465, "step": 7326 }, { "epoch": 58.616, "grad_norm": 16.111446380615234, "learning_rate": 2.3026666666666668e-05, "loss": 0.6867, "step": 7327 }, { "epoch": 58.624, "grad_norm": 23.624849319458008, "learning_rate": 2.3022222222222222e-05, "loss": 0.9105, "step": 7328 }, { "epoch": 58.632, "grad_norm": 58.24786376953125, "learning_rate": 2.301777777777778e-05, "loss": 1.0922, "step": 7329 }, { "epoch": 58.64, "grad_norm": 23.600807189941406, "learning_rate": 2.3013333333333335e-05, "loss": 0.6805, "step": 7330 }, { "epoch": 58.648, "grad_norm": 19.977670669555664, "learning_rate": 2.300888888888889e-05, "loss": 0.8471, "step": 7331 }, { "epoch": 58.656, "grad_norm": 49.48329544067383, "learning_rate": 2.3004444444444445e-05, "loss": 1.8173, "step": 7332 }, { "epoch": 58.664, "grad_norm": 17.528244018554688, "learning_rate": 2.3000000000000003e-05, "loss": 0.9272, "step": 7333 }, { "epoch": 58.672, "grad_norm": 18.547889709472656, "learning_rate": 2.2995555555555555e-05, "loss": 0.5906, "step": 7334 }, { "epoch": 58.68, "grad_norm": 35.373416900634766, "learning_rate": 2.2991111111111113e-05, "loss": 0.7445, "step": 7335 }, { "epoch": 58.688, "grad_norm": 33.055809020996094, "learning_rate": 2.2986666666666668e-05, "loss": 1.051, "step": 7336 }, { "epoch": 58.696, "grad_norm": 20.81576156616211, "learning_rate": 2.2982222222222223e-05, "loss": 1.094, "step": 7337 }, { "epoch": 58.704, "grad_norm": 30.62239646911621, "learning_rate": 2.2977777777777778e-05, "loss": 1.0356, "step": 7338 }, { "epoch": 58.712, "grad_norm": 14.357827186584473, "learning_rate": 2.2973333333333336e-05, "loss": 0.8158, "step": 7339 }, { "epoch": 58.72, "grad_norm": 31.603914260864258, "learning_rate": 2.296888888888889e-05, "loss": 0.8759, "step": 7340 }, { "epoch": 58.728, "grad_norm": 31.778030395507812, "learning_rate": 
2.2964444444444446e-05, "loss": 0.9723, "step": 7341 }, { "epoch": 58.736, "grad_norm": 19.35239601135254, "learning_rate": 2.296e-05, "loss": 0.6384, "step": 7342 }, { "epoch": 58.744, "grad_norm": 77.03095245361328, "learning_rate": 2.295555555555556e-05, "loss": 1.7872, "step": 7343 }, { "epoch": 58.752, "grad_norm": 20.951622009277344, "learning_rate": 2.2951111111111113e-05, "loss": 0.6941, "step": 7344 }, { "epoch": 58.76, "grad_norm": 25.264862060546875, "learning_rate": 2.294666666666667e-05, "loss": 0.9197, "step": 7345 }, { "epoch": 58.768, "grad_norm": 27.8410587310791, "learning_rate": 2.2942222222222223e-05, "loss": 2.5735, "step": 7346 }, { "epoch": 58.776, "grad_norm": 232.64047241210938, "learning_rate": 2.293777777777778e-05, "loss": 1.0145, "step": 7347 }, { "epoch": 58.784, "grad_norm": 18.084978103637695, "learning_rate": 2.2933333333333333e-05, "loss": 1.0577, "step": 7348 }, { "epoch": 58.792, "grad_norm": 36.3968620300293, "learning_rate": 2.2928888888888888e-05, "loss": 0.9028, "step": 7349 }, { "epoch": 58.8, "grad_norm": 49.86820602416992, "learning_rate": 2.2924444444444446e-05, "loss": 0.7606, "step": 7350 }, { "epoch": 58.808, "grad_norm": 21.5797061920166, "learning_rate": 2.292e-05, "loss": 1.0802, "step": 7351 }, { "epoch": 58.816, "grad_norm": 21.3676815032959, "learning_rate": 2.2915555555555556e-05, "loss": 0.9912, "step": 7352 }, { "epoch": 58.824, "grad_norm": 24.456125259399414, "learning_rate": 2.291111111111111e-05, "loss": 1.2703, "step": 7353 }, { "epoch": 58.832, "grad_norm": 29.485816955566406, "learning_rate": 2.290666666666667e-05, "loss": 1.2707, "step": 7354 }, { "epoch": 58.84, "grad_norm": 40.67139434814453, "learning_rate": 2.2902222222222224e-05, "loss": 1.0614, "step": 7355 }, { "epoch": 58.848, "grad_norm": 28.39382553100586, "learning_rate": 2.289777777777778e-05, "loss": 0.8623, "step": 7356 }, { "epoch": 58.856, "grad_norm": 25.030014038085938, "learning_rate": 2.2893333333333333e-05, "loss": 0.9225, "step": 
7357 }, { "epoch": 58.864, "grad_norm": 53.771427154541016, "learning_rate": 2.288888888888889e-05, "loss": 1.7166, "step": 7358 }, { "epoch": 58.872, "grad_norm": 74.14873504638672, "learning_rate": 2.2884444444444443e-05, "loss": 0.8756, "step": 7359 }, { "epoch": 58.88, "grad_norm": 34.874351501464844, "learning_rate": 2.288e-05, "loss": 0.8608, "step": 7360 }, { "epoch": 58.888, "grad_norm": 29.036571502685547, "learning_rate": 2.2875555555555556e-05, "loss": 1.0974, "step": 7361 }, { "epoch": 58.896, "grad_norm": 21.553020477294922, "learning_rate": 2.2871111111111114e-05, "loss": 0.811, "step": 7362 }, { "epoch": 58.904, "grad_norm": 17.167736053466797, "learning_rate": 2.2866666666666666e-05, "loss": 0.8935, "step": 7363 }, { "epoch": 58.912, "grad_norm": 25.39564323425293, "learning_rate": 2.2862222222222224e-05, "loss": 1.2257, "step": 7364 }, { "epoch": 58.92, "grad_norm": 53.15165710449219, "learning_rate": 2.285777777777778e-05, "loss": 0.7883, "step": 7365 }, { "epoch": 58.928, "grad_norm": 32.78211212158203, "learning_rate": 2.2853333333333334e-05, "loss": 0.8615, "step": 7366 }, { "epoch": 58.936, "grad_norm": 63.75340270996094, "learning_rate": 2.284888888888889e-05, "loss": 0.7985, "step": 7367 }, { "epoch": 58.944, "grad_norm": 42.80321502685547, "learning_rate": 2.2844444444444447e-05, "loss": 1.2963, "step": 7368 }, { "epoch": 58.952, "grad_norm": 24.058542251586914, "learning_rate": 2.284e-05, "loss": 0.766, "step": 7369 }, { "epoch": 58.96, "grad_norm": 62.39369583129883, "learning_rate": 2.2835555555555556e-05, "loss": 1.0211, "step": 7370 }, { "epoch": 58.968, "grad_norm": 22.566307067871094, "learning_rate": 2.283111111111111e-05, "loss": 0.9952, "step": 7371 }, { "epoch": 58.976, "grad_norm": 40.69001770019531, "learning_rate": 2.282666666666667e-05, "loss": 0.9957, "step": 7372 }, { "epoch": 58.984, "grad_norm": 15.229341506958008, "learning_rate": 2.282222222222222e-05, "loss": 0.8285, "step": 7373 }, { "epoch": 58.992, "grad_norm": 
19.59900665283203, "learning_rate": 2.281777777777778e-05, "loss": 0.7782, "step": 7374 }, { "epoch": 59.0, "grad_norm": 14.807244300842285, "learning_rate": 2.2813333333333334e-05, "loss": 1.0852, "step": 7375 }, { "epoch": 59.0, "eval_loss": 0.9884536862373352, "eval_map": 0.4162, "eval_map_50": 0.7647, "eval_map_75": 0.4114, "eval_map_Coverall": 0.6056, "eval_map_Face_Shield": 0.4972, "eval_map_Gloves": 0.3455, "eval_map_Goggles": 0.2124, "eval_map_Mask": 0.4203, "eval_map_large": 0.6023, "eval_map_medium": 0.3282, "eval_map_small": 0.3114, "eval_mar_1": 0.3141, "eval_mar_10": 0.5679, "eval_mar_100": 0.5848, "eval_mar_100_Coverall": 0.7733, "eval_mar_100_Face_Shield": 0.7, "eval_mar_100_Gloves": 0.4885, "eval_mar_100_Goggles": 0.4563, "eval_mar_100_Mask": 0.5058, "eval_mar_large": 0.6976, "eval_mar_medium": 0.4965, "eval_mar_small": 0.3602, "eval_runtime": 0.9161, "eval_samples_per_second": 31.654, "eval_steps_per_second": 2.183, "step": 7375 }, { "epoch": 59.008, "grad_norm": 14.712132453918457, "learning_rate": 2.2808888888888892e-05, "loss": 0.7757, "step": 7376 }, { "epoch": 59.016, "grad_norm": 29.857181549072266, "learning_rate": 2.2804444444444444e-05, "loss": 0.7389, "step": 7377 }, { "epoch": 59.024, "grad_norm": 22.647693634033203, "learning_rate": 2.2800000000000002e-05, "loss": 1.69, "step": 7378 }, { "epoch": 59.032, "grad_norm": 21.9699764251709, "learning_rate": 2.2795555555555557e-05, "loss": 0.602, "step": 7379 }, { "epoch": 59.04, "grad_norm": 63.62928771972656, "learning_rate": 2.279111111111111e-05, "loss": 1.0226, "step": 7380 }, { "epoch": 59.048, "grad_norm": 21.558837890625, "learning_rate": 2.2786666666666666e-05, "loss": 1.2264, "step": 7381 }, { "epoch": 59.056, "grad_norm": 21.3332462310791, "learning_rate": 2.2782222222222225e-05, "loss": 0.7149, "step": 7382 }, { "epoch": 59.064, "grad_norm": 45.84822082519531, "learning_rate": 2.277777777777778e-05, "loss": 1.0529, "step": 7383 }, { "epoch": 59.072, "grad_norm": 24.2522029876709, 
"learning_rate": 2.2773333333333334e-05, "loss": 0.7862, "step": 7384 }, { "epoch": 59.08, "grad_norm": 18.950557708740234, "learning_rate": 2.276888888888889e-05, "loss": 0.8644, "step": 7385 }, { "epoch": 59.088, "grad_norm": 23.24439811706543, "learning_rate": 2.2764444444444447e-05, "loss": 0.7028, "step": 7386 }, { "epoch": 59.096, "grad_norm": 27.141075134277344, "learning_rate": 2.2760000000000002e-05, "loss": 0.8799, "step": 7387 }, { "epoch": 59.104, "grad_norm": 40.79505920410156, "learning_rate": 2.2755555555555557e-05, "loss": 2.8932, "step": 7388 }, { "epoch": 59.112, "grad_norm": 20.90213394165039, "learning_rate": 2.2751111111111112e-05, "loss": 1.1199, "step": 7389 }, { "epoch": 59.12, "grad_norm": 43.16044616699219, "learning_rate": 2.274666666666667e-05, "loss": 1.8734, "step": 7390 }, { "epoch": 59.128, "grad_norm": 20.472558975219727, "learning_rate": 2.2742222222222222e-05, "loss": 0.9879, "step": 7391 }, { "epoch": 59.136, "grad_norm": 39.98176193237305, "learning_rate": 2.273777777777778e-05, "loss": 0.9087, "step": 7392 }, { "epoch": 59.144, "grad_norm": 20.262800216674805, "learning_rate": 2.2733333333333335e-05, "loss": 1.1327, "step": 7393 }, { "epoch": 59.152, "grad_norm": 19.002059936523438, "learning_rate": 2.272888888888889e-05, "loss": 0.6136, "step": 7394 }, { "epoch": 59.16, "grad_norm": 34.99797439575195, "learning_rate": 2.2724444444444444e-05, "loss": 1.4429, "step": 7395 }, { "epoch": 59.168, "grad_norm": 15.262410163879395, "learning_rate": 2.2720000000000003e-05, "loss": 0.7404, "step": 7396 }, { "epoch": 59.176, "grad_norm": 39.65351104736328, "learning_rate": 2.2715555555555558e-05, "loss": 1.3074, "step": 7397 }, { "epoch": 59.184, "grad_norm": 20.674774169921875, "learning_rate": 2.2711111111111112e-05, "loss": 0.6213, "step": 7398 }, { "epoch": 59.192, "grad_norm": 27.519495010375977, "learning_rate": 2.2706666666666667e-05, "loss": 0.9129, "step": 7399 }, { "epoch": 59.2, "grad_norm": 23.799161911010742, 
"learning_rate": 2.2702222222222222e-05, "loss": 1.0231, "step": 7400 }, { "epoch": 59.208, "grad_norm": 43.578392028808594, "learning_rate": 2.269777777777778e-05, "loss": 1.1882, "step": 7401 }, { "epoch": 59.216, "grad_norm": 28.021703720092773, "learning_rate": 2.2693333333333332e-05, "loss": 0.8356, "step": 7402 }, { "epoch": 59.224, "grad_norm": 19.935876846313477, "learning_rate": 2.268888888888889e-05, "loss": 0.8001, "step": 7403 }, { "epoch": 59.232, "grad_norm": 94.62481689453125, "learning_rate": 2.2684444444444445e-05, "loss": 1.3478, "step": 7404 }, { "epoch": 59.24, "grad_norm": 34.70956039428711, "learning_rate": 2.268e-05, "loss": 1.2353, "step": 7405 }, { "epoch": 59.248, "grad_norm": 22.468692779541016, "learning_rate": 2.2675555555555555e-05, "loss": 0.7191, "step": 7406 }, { "epoch": 59.256, "grad_norm": 30.45767593383789, "learning_rate": 2.2671111111111113e-05, "loss": 1.0349, "step": 7407 }, { "epoch": 59.264, "grad_norm": 32.2130241394043, "learning_rate": 2.2666666666666668e-05, "loss": 1.2199, "step": 7408 }, { "epoch": 59.272, "grad_norm": 21.290775299072266, "learning_rate": 2.2662222222222222e-05, "loss": 1.0537, "step": 7409 }, { "epoch": 59.28, "grad_norm": 20.27643394470215, "learning_rate": 2.2657777777777777e-05, "loss": 0.7545, "step": 7410 }, { "epoch": 59.288, "grad_norm": 15.627137184143066, "learning_rate": 2.2653333333333336e-05, "loss": 1.1003, "step": 7411 }, { "epoch": 59.296, "grad_norm": 15.142483711242676, "learning_rate": 2.264888888888889e-05, "loss": 0.7095, "step": 7412 }, { "epoch": 59.304, "grad_norm": 83.85506439208984, "learning_rate": 2.2644444444444445e-05, "loss": 1.0877, "step": 7413 }, { "epoch": 59.312, "grad_norm": 31.761899948120117, "learning_rate": 2.264e-05, "loss": 0.9867, "step": 7414 }, { "epoch": 59.32, "grad_norm": 41.24466323852539, "learning_rate": 2.263555555555556e-05, "loss": 1.0126, "step": 7415 }, { "epoch": 59.328, "grad_norm": 16.612974166870117, "learning_rate": 2.263111111111111e-05, 
"loss": 0.764, "step": 7416 }, { "epoch": 59.336, "grad_norm": 22.498477935791016, "learning_rate": 2.2626666666666668e-05, "loss": 0.796, "step": 7417 }, { "epoch": 59.344, "grad_norm": 16.82176971435547, "learning_rate": 2.2622222222222223e-05, "loss": 0.7159, "step": 7418 }, { "epoch": 59.352, "grad_norm": 42.57083511352539, "learning_rate": 2.261777777777778e-05, "loss": 0.9692, "step": 7419 }, { "epoch": 59.36, "grad_norm": 19.66693878173828, "learning_rate": 2.2613333333333333e-05, "loss": 0.8803, "step": 7420 }, { "epoch": 59.368, "grad_norm": 23.075172424316406, "learning_rate": 2.260888888888889e-05, "loss": 0.925, "step": 7421 }, { "epoch": 59.376, "grad_norm": 25.67266845703125, "learning_rate": 2.2604444444444446e-05, "loss": 0.9635, "step": 7422 }, { "epoch": 59.384, "grad_norm": 24.257875442504883, "learning_rate": 2.26e-05, "loss": 1.1165, "step": 7423 }, { "epoch": 59.392, "grad_norm": 37.19083023071289, "learning_rate": 2.2595555555555555e-05, "loss": 1.1024, "step": 7424 }, { "epoch": 59.4, "grad_norm": 61.11241149902344, "learning_rate": 2.2591111111111114e-05, "loss": 0.9791, "step": 7425 }, { "epoch": 59.408, "grad_norm": 41.13558578491211, "learning_rate": 2.258666666666667e-05, "loss": 1.1759, "step": 7426 }, { "epoch": 59.416, "grad_norm": 13.392881393432617, "learning_rate": 2.2582222222222223e-05, "loss": 1.0556, "step": 7427 }, { "epoch": 59.424, "grad_norm": 26.082565307617188, "learning_rate": 2.2577777777777778e-05, "loss": 1.0389, "step": 7428 }, { "epoch": 59.432, "grad_norm": 32.815467834472656, "learning_rate": 2.2573333333333336e-05, "loss": 0.9965, "step": 7429 }, { "epoch": 59.44, "grad_norm": 116.55896759033203, "learning_rate": 2.2568888888888888e-05, "loss": 0.7493, "step": 7430 }, { "epoch": 59.448, "grad_norm": 17.985958099365234, "learning_rate": 2.2564444444444446e-05, "loss": 0.9105, "step": 7431 }, { "epoch": 59.456, "grad_norm": 19.107158660888672, "learning_rate": 2.256e-05, "loss": 0.8292, "step": 7432 }, { "epoch": 
59.464, "grad_norm": 31.475872039794922, "learning_rate": 2.255555555555556e-05, "loss": 0.9281, "step": 7433 }, { "epoch": 59.472, "grad_norm": 24.56932258605957, "learning_rate": 2.255111111111111e-05, "loss": 0.9964, "step": 7434 }, { "epoch": 59.48, "grad_norm": 35.16286849975586, "learning_rate": 2.254666666666667e-05, "loss": 2.6184, "step": 7435 }, { "epoch": 59.488, "grad_norm": 40.37734603881836, "learning_rate": 2.2542222222222224e-05, "loss": 1.3212, "step": 7436 }, { "epoch": 59.496, "grad_norm": 21.24280548095703, "learning_rate": 2.253777777777778e-05, "loss": 0.8789, "step": 7437 }, { "epoch": 59.504, "grad_norm": 45.90513229370117, "learning_rate": 2.2533333333333333e-05, "loss": 0.6577, "step": 7438 }, { "epoch": 59.512, "grad_norm": 16.216535568237305, "learning_rate": 2.252888888888889e-05, "loss": 1.3531, "step": 7439 }, { "epoch": 59.52, "grad_norm": 13.794055938720703, "learning_rate": 2.2524444444444446e-05, "loss": 0.6409, "step": 7440 }, { "epoch": 59.528, "grad_norm": 41.561588287353516, "learning_rate": 2.252e-05, "loss": 1.1085, "step": 7441 }, { "epoch": 59.536, "grad_norm": 14.791423797607422, "learning_rate": 2.2515555555555556e-05, "loss": 0.9784, "step": 7442 }, { "epoch": 59.544, "grad_norm": 37.356842041015625, "learning_rate": 2.2511111111111114e-05, "loss": 0.8565, "step": 7443 }, { "epoch": 59.552, "grad_norm": 44.86454391479492, "learning_rate": 2.250666666666667e-05, "loss": 1.0908, "step": 7444 }, { "epoch": 59.56, "grad_norm": 30.661945343017578, "learning_rate": 2.2502222222222224e-05, "loss": 1.036, "step": 7445 }, { "epoch": 59.568, "grad_norm": 56.33424377441406, "learning_rate": 2.249777777777778e-05, "loss": 0.8184, "step": 7446 }, { "epoch": 59.576, "grad_norm": 28.237348556518555, "learning_rate": 2.2493333333333337e-05, "loss": 0.9525, "step": 7447 }, { "epoch": 59.584, "grad_norm": 23.823123931884766, "learning_rate": 2.248888888888889e-05, "loss": 1.0909, "step": 7448 }, { "epoch": 59.592, "grad_norm": 
19.69953155517578, "learning_rate": 2.2484444444444447e-05, "loss": 1.0787, "step": 7449 }, { "epoch": 59.6, "grad_norm": 22.027435302734375, "learning_rate": 2.248e-05, "loss": 1.0152, "step": 7450 }, { "epoch": 59.608, "grad_norm": 11.672690391540527, "learning_rate": 2.2475555555555556e-05, "loss": 0.8245, "step": 7451 }, { "epoch": 59.616, "grad_norm": 25.759912490844727, "learning_rate": 2.247111111111111e-05, "loss": 1.2445, "step": 7452 }, { "epoch": 59.624, "grad_norm": 15.339906692504883, "learning_rate": 2.2466666666666666e-05, "loss": 0.6956, "step": 7453 }, { "epoch": 59.632, "grad_norm": 79.54853057861328, "learning_rate": 2.2462222222222224e-05, "loss": 0.973, "step": 7454 }, { "epoch": 59.64, "grad_norm": 22.006160736083984, "learning_rate": 2.245777777777778e-05, "loss": 0.8374, "step": 7455 }, { "epoch": 59.648, "grad_norm": 61.45706558227539, "learning_rate": 2.2453333333333334e-05, "loss": 0.648, "step": 7456 }, { "epoch": 59.656, "grad_norm": 23.055809020996094, "learning_rate": 2.244888888888889e-05, "loss": 0.6509, "step": 7457 }, { "epoch": 59.664, "grad_norm": 45.21507263183594, "learning_rate": 2.2444444444444447e-05, "loss": 1.2224, "step": 7458 }, { "epoch": 59.672, "grad_norm": 104.09790802001953, "learning_rate": 2.244e-05, "loss": 0.7109, "step": 7459 }, { "epoch": 59.68, "grad_norm": 32.261497497558594, "learning_rate": 2.2435555555555557e-05, "loss": 1.0714, "step": 7460 }, { "epoch": 59.688, "grad_norm": 20.492475509643555, "learning_rate": 2.243111111111111e-05, "loss": 1.3021, "step": 7461 }, { "epoch": 59.696, "grad_norm": 22.9473876953125, "learning_rate": 2.2426666666666667e-05, "loss": 0.7849, "step": 7462 }, { "epoch": 59.704, "grad_norm": 15.762683868408203, "learning_rate": 2.242222222222222e-05, "loss": 0.8096, "step": 7463 }, { "epoch": 59.712, "grad_norm": 22.706993103027344, "learning_rate": 2.241777777777778e-05, "loss": 1.126, "step": 7464 }, { "epoch": 59.72, "grad_norm": 31.606847763061523, "learning_rate": 
2.2413333333333334e-05, "loss": 2.4164, "step": 7465 }, { "epoch": 59.728, "grad_norm": 30.06626319885254, "learning_rate": 2.240888888888889e-05, "loss": 1.2626, "step": 7466 }, { "epoch": 59.736, "grad_norm": 19.058319091796875, "learning_rate": 2.2404444444444444e-05, "loss": 1.0504, "step": 7467 }, { "epoch": 59.744, "grad_norm": 28.07870101928711, "learning_rate": 2.2400000000000002e-05, "loss": 1.6147, "step": 7468 }, { "epoch": 59.752, "grad_norm": 38.05958938598633, "learning_rate": 2.2395555555555557e-05, "loss": 0.775, "step": 7469 }, { "epoch": 59.76, "grad_norm": 28.421716690063477, "learning_rate": 2.2391111111111112e-05, "loss": 0.6376, "step": 7470 }, { "epoch": 59.768, "grad_norm": 44.4266471862793, "learning_rate": 2.2386666666666667e-05, "loss": 1.1473, "step": 7471 }, { "epoch": 59.776, "grad_norm": 100.709228515625, "learning_rate": 2.2382222222222225e-05, "loss": 1.2282, "step": 7472 }, { "epoch": 59.784, "grad_norm": 44.50312805175781, "learning_rate": 2.2377777777777777e-05, "loss": 0.6935, "step": 7473 }, { "epoch": 59.792, "grad_norm": 16.706575393676758, "learning_rate": 2.2373333333333335e-05, "loss": 1.2414, "step": 7474 }, { "epoch": 59.8, "grad_norm": 16.969270706176758, "learning_rate": 2.236888888888889e-05, "loss": 0.6655, "step": 7475 }, { "epoch": 59.808, "grad_norm": 27.073040008544922, "learning_rate": 2.2364444444444445e-05, "loss": 1.019, "step": 7476 }, { "epoch": 59.816, "grad_norm": 20.233842849731445, "learning_rate": 2.236e-05, "loss": 0.8676, "step": 7477 }, { "epoch": 59.824, "grad_norm": 19.00225067138672, "learning_rate": 2.2355555555555558e-05, "loss": 1.1999, "step": 7478 }, { "epoch": 59.832, "grad_norm": 32.38365936279297, "learning_rate": 2.2351111111111112e-05, "loss": 0.8465, "step": 7479 }, { "epoch": 59.84, "grad_norm": 176.0545654296875, "learning_rate": 2.2346666666666667e-05, "loss": 0.9935, "step": 7480 }, { "epoch": 59.848, "grad_norm": 54.89240264892578, "learning_rate": 2.2342222222222222e-05, "loss": 
1.155, "step": 7481 }, { "epoch": 59.856, "grad_norm": 17.6675968170166, "learning_rate": 2.233777777777778e-05, "loss": 0.8326, "step": 7482 }, { "epoch": 59.864, "grad_norm": 16.674518585205078, "learning_rate": 2.2333333333333335e-05, "loss": 1.052, "step": 7483 }, { "epoch": 59.872, "grad_norm": 52.757240295410156, "learning_rate": 2.232888888888889e-05, "loss": 0.8255, "step": 7484 }, { "epoch": 59.88, "grad_norm": 43.05287170410156, "learning_rate": 2.2324444444444445e-05, "loss": 1.161, "step": 7485 }, { "epoch": 59.888, "grad_norm": 23.481937408447266, "learning_rate": 2.2320000000000003e-05, "loss": 1.0973, "step": 7486 }, { "epoch": 59.896, "grad_norm": 23.393871307373047, "learning_rate": 2.2315555555555555e-05, "loss": 0.8758, "step": 7487 }, { "epoch": 59.904, "grad_norm": 38.87651824951172, "learning_rate": 2.2311111111111113e-05, "loss": 1.1619, "step": 7488 }, { "epoch": 59.912, "grad_norm": 36.47652053833008, "learning_rate": 2.2306666666666668e-05, "loss": 0.9782, "step": 7489 }, { "epoch": 59.92, "grad_norm": 72.9335708618164, "learning_rate": 2.2302222222222226e-05, "loss": 0.7385, "step": 7490 }, { "epoch": 59.928, "grad_norm": 29.437061309814453, "learning_rate": 2.2297777777777777e-05, "loss": 0.7097, "step": 7491 }, { "epoch": 59.936, "grad_norm": 19.82813835144043, "learning_rate": 2.2293333333333336e-05, "loss": 1.0663, "step": 7492 }, { "epoch": 59.944, "grad_norm": 21.6624755859375, "learning_rate": 2.228888888888889e-05, "loss": 0.9366, "step": 7493 }, { "epoch": 59.952, "grad_norm": 23.116182327270508, "learning_rate": 2.2284444444444445e-05, "loss": 0.5183, "step": 7494 }, { "epoch": 59.96, "grad_norm": 24.05706787109375, "learning_rate": 2.228e-05, "loss": 0.7752, "step": 7495 }, { "epoch": 59.968, "grad_norm": 23.47165870666504, "learning_rate": 2.227555555555556e-05, "loss": 0.8492, "step": 7496 }, { "epoch": 59.976, "grad_norm": 16.0249080657959, "learning_rate": 2.2271111111111113e-05, "loss": 0.7295, "step": 7497 }, { "epoch": 
59.984, "grad_norm": 30.15252685546875, "learning_rate": 2.2266666666666668e-05, "loss": 1.0359, "step": 7498 }, { "epoch": 59.992, "grad_norm": 17.140146255493164, "learning_rate": 2.2262222222222223e-05, "loss": 1.1812, "step": 7499 }, { "epoch": 60.0, "grad_norm": 46.78312683105469, "learning_rate": 2.225777777777778e-05, "loss": 0.8012, "step": 7500 }, { "epoch": 60.0, "eval_loss": 1.0246362686157227, "eval_map": 0.4213, "eval_map_50": 0.7774, "eval_map_75": 0.3929, "eval_map_Coverall": 0.6572, "eval_map_Face_Shield": 0.4913, "eval_map_Gloves": 0.3677, "eval_map_Goggles": 0.2098, "eval_map_Mask": 0.3808, "eval_map_large": 0.6535, "eval_map_medium": 0.2887, "eval_map_small": 0.2449, "eval_mar_1": 0.3292, "eval_mar_10": 0.5551, "eval_mar_100": 0.5646, "eval_mar_100_Coverall": 0.7622, "eval_mar_100_Face_Shield": 0.6588, "eval_mar_100_Gloves": 0.4836, "eval_mar_100_Goggles": 0.4375, "eval_mar_100_Mask": 0.4808, "eval_mar_large": 0.729, "eval_mar_medium": 0.4359, "eval_mar_small": 0.3011, "eval_runtime": 1.0272, "eval_samples_per_second": 28.231, "eval_steps_per_second": 1.947, "step": 7500 }, { "epoch": 60.008, "grad_norm": 18.85017204284668, "learning_rate": 2.2253333333333336e-05, "loss": 0.7519, "step": 7501 }, { "epoch": 60.016, "grad_norm": 18.44782829284668, "learning_rate": 2.224888888888889e-05, "loss": 0.7001, "step": 7502 }, { "epoch": 60.024, "grad_norm": 39.98960876464844, "learning_rate": 2.2244444444444446e-05, "loss": 1.8769, "step": 7503 }, { "epoch": 60.032, "grad_norm": 36.12211227416992, "learning_rate": 2.224e-05, "loss": 0.8935, "step": 7504 }, { "epoch": 60.04, "grad_norm": 13.211451530456543, "learning_rate": 2.2235555555555555e-05, "loss": 0.8029, "step": 7505 }, { "epoch": 60.048, "grad_norm": 25.59038543701172, "learning_rate": 2.223111111111111e-05, "loss": 1.2558, "step": 7506 }, { "epoch": 60.056, "grad_norm": 29.379289627075195, "learning_rate": 2.222666666666667e-05, "loss": 1.1022, "step": 7507 }, { "epoch": 60.064, "grad_norm": 
28.00154685974121, "learning_rate": 2.2222222222222223e-05, "loss": 1.0523, "step": 7508 }, { "epoch": 60.072, "grad_norm": 33.92332077026367, "learning_rate": 2.2217777777777778e-05, "loss": 0.9569, "step": 7509 }, { "epoch": 60.08, "grad_norm": 14.313253402709961, "learning_rate": 2.2213333333333333e-05, "loss": 0.8787, "step": 7510 }, { "epoch": 60.088, "grad_norm": 31.744035720825195, "learning_rate": 2.220888888888889e-05, "loss": 0.9531, "step": 7511 }, { "epoch": 60.096, "grad_norm": 27.253814697265625, "learning_rate": 2.2204444444444446e-05, "loss": 0.8262, "step": 7512 }, { "epoch": 60.104, "grad_norm": 38.382347106933594, "learning_rate": 2.22e-05, "loss": 0.6941, "step": 7513 }, { "epoch": 60.112, "grad_norm": 24.651262283325195, "learning_rate": 2.2195555555555556e-05, "loss": 0.7642, "step": 7514 }, { "epoch": 60.12, "grad_norm": 19.442672729492188, "learning_rate": 2.2191111111111114e-05, "loss": 0.7928, "step": 7515 }, { "epoch": 60.128, "grad_norm": 15.840071678161621, "learning_rate": 2.2186666666666665e-05, "loss": 1.3165, "step": 7516 }, { "epoch": 60.136, "grad_norm": 22.788291931152344, "learning_rate": 2.2182222222222224e-05, "loss": 1.266, "step": 7517 }, { "epoch": 60.144, "grad_norm": 172.2085723876953, "learning_rate": 2.217777777777778e-05, "loss": 3.2953, "step": 7518 }, { "epoch": 60.152, "grad_norm": 16.08576202392578, "learning_rate": 2.2173333333333333e-05, "loss": 0.7815, "step": 7519 }, { "epoch": 60.16, "grad_norm": 50.95745086669922, "learning_rate": 2.2168888888888888e-05, "loss": 0.8736, "step": 7520 }, { "epoch": 60.168, "grad_norm": 26.376449584960938, "learning_rate": 2.2164444444444446e-05, "loss": 1.1735, "step": 7521 }, { "epoch": 60.176, "grad_norm": 29.084360122680664, "learning_rate": 2.216e-05, "loss": 0.6228, "step": 7522 }, { "epoch": 60.184, "grad_norm": 25.819660186767578, "learning_rate": 2.2155555555555556e-05, "loss": 1.1768, "step": 7523 }, { "epoch": 60.192, "grad_norm": 16.510244369506836, "learning_rate": 
2.215111111111111e-05, "loss": 0.5639, "step": 7524 }, { "epoch": 60.2, "grad_norm": 35.26551818847656, "learning_rate": 2.214666666666667e-05, "loss": 0.9466, "step": 7525 }, { "epoch": 60.208, "grad_norm": 83.13058471679688, "learning_rate": 2.2142222222222224e-05, "loss": 0.6496, "step": 7526 }, { "epoch": 60.216, "grad_norm": 68.48950958251953, "learning_rate": 2.213777777777778e-05, "loss": 1.3146, "step": 7527 }, { "epoch": 60.224, "grad_norm": 31.4696102142334, "learning_rate": 2.2133333333333334e-05, "loss": 1.0346, "step": 7528 }, { "epoch": 60.232, "grad_norm": 22.702842712402344, "learning_rate": 2.2128888888888892e-05, "loss": 0.8879, "step": 7529 }, { "epoch": 60.24, "grad_norm": 36.332435607910156, "learning_rate": 2.2124444444444443e-05, "loss": 0.8867, "step": 7530 }, { "epoch": 60.248, "grad_norm": 29.747852325439453, "learning_rate": 2.212e-05, "loss": 0.8118, "step": 7531 }, { "epoch": 60.256, "grad_norm": 189.21112060546875, "learning_rate": 2.2115555555555557e-05, "loss": 0.8475, "step": 7532 }, { "epoch": 60.264, "grad_norm": 26.15916633605957, "learning_rate": 2.211111111111111e-05, "loss": 1.1681, "step": 7533 }, { "epoch": 60.272, "grad_norm": 30.89715003967285, "learning_rate": 2.2106666666666666e-05, "loss": 0.927, "step": 7534 }, { "epoch": 60.28, "grad_norm": 73.81597900390625, "learning_rate": 2.2102222222222224e-05, "loss": 0.9344, "step": 7535 }, { "epoch": 60.288, "grad_norm": 24.15369415283203, "learning_rate": 2.209777777777778e-05, "loss": 1.1682, "step": 7536 }, { "epoch": 60.296, "grad_norm": 23.721206665039062, "learning_rate": 2.2093333333333334e-05, "loss": 1.0261, "step": 7537 }, { "epoch": 60.304, "grad_norm": 29.843170166015625, "learning_rate": 2.208888888888889e-05, "loss": 0.821, "step": 7538 }, { "epoch": 60.312, "grad_norm": 17.727947235107422, "learning_rate": 2.2084444444444447e-05, "loss": 1.327, "step": 7539 }, { "epoch": 60.32, "grad_norm": Infinity, "learning_rate": 2.2084444444444447e-05, "loss": 0.7923, 
"step": 7540 }, { "epoch": 60.328, "grad_norm": 20.134124755859375, "learning_rate": 2.2080000000000002e-05, "loss": 1.1491, "step": 7541 }, { "epoch": 60.336, "grad_norm": 33.372467041015625, "learning_rate": 2.2075555555555557e-05, "loss": 0.8221, "step": 7542 }, { "epoch": 60.344, "grad_norm": 14.32776165008545, "learning_rate": 2.2071111111111112e-05, "loss": 1.1089, "step": 7543 }, { "epoch": 60.352, "grad_norm": 17.112098693847656, "learning_rate": 2.206666666666667e-05, "loss": 1.0498, "step": 7544 }, { "epoch": 60.36, "grad_norm": 14.569537162780762, "learning_rate": 2.206222222222222e-05, "loss": 0.8318, "step": 7545 }, { "epoch": 60.368, "grad_norm": 19.20022964477539, "learning_rate": 2.205777777777778e-05, "loss": 1.0916, "step": 7546 }, { "epoch": 60.376, "grad_norm": 26.84769630432129, "learning_rate": 2.2053333333333335e-05, "loss": 0.8428, "step": 7547 }, { "epoch": 60.384, "grad_norm": 42.2910041809082, "learning_rate": 2.2048888888888893e-05, "loss": 0.9727, "step": 7548 }, { "epoch": 60.392, "grad_norm": 29.909772872924805, "learning_rate": 2.2044444444444444e-05, "loss": 0.9293, "step": 7549 }, { "epoch": 60.4, "grad_norm": 40.50459671020508, "learning_rate": 2.2040000000000002e-05, "loss": 0.7935, "step": 7550 }, { "epoch": 60.408, "grad_norm": 28.307723999023438, "learning_rate": 2.2035555555555557e-05, "loss": 0.7603, "step": 7551 }, { "epoch": 60.416, "grad_norm": 81.59615325927734, "learning_rate": 2.2031111111111112e-05, "loss": 0.9652, "step": 7552 }, { "epoch": 60.424, "grad_norm": 13.592880249023438, "learning_rate": 2.2026666666666667e-05, "loss": 0.7916, "step": 7553 }, { "epoch": 60.432, "grad_norm": 31.88445281982422, "learning_rate": 2.2022222222222225e-05, "loss": 1.0221, "step": 7554 }, { "epoch": 60.44, "grad_norm": 28.937883377075195, "learning_rate": 2.201777777777778e-05, "loss": 0.7682, "step": 7555 }, { "epoch": 60.448, "grad_norm": 34.25476837158203, "learning_rate": 2.201333333333333e-05, "loss": 1.8389, "step": 7556 }, { 
"epoch": 60.456, "grad_norm": 90.43238067626953, "learning_rate": 2.200888888888889e-05, "loss": 1.1323, "step": 7557 }, { "epoch": 60.464, "grad_norm": 22.006607055664062, "learning_rate": 2.2004444444444445e-05, "loss": 1.0637, "step": 7558 }, { "epoch": 60.472, "grad_norm": 25.270732879638672, "learning_rate": 2.2000000000000003e-05, "loss": 0.6682, "step": 7559 }, { "epoch": 60.48, "grad_norm": 45.33245849609375, "learning_rate": 2.1995555555555554e-05, "loss": 0.8082, "step": 7560 }, { "epoch": 60.488, "grad_norm": 16.22903060913086, "learning_rate": 2.1991111111111113e-05, "loss": 0.6737, "step": 7561 }, { "epoch": 60.496, "grad_norm": 30.83281898498535, "learning_rate": 2.1986666666666667e-05, "loss": 0.9722, "step": 7562 }, { "epoch": 60.504, "grad_norm": 27.88348388671875, "learning_rate": 2.1982222222222222e-05, "loss": 0.6741, "step": 7563 }, { "epoch": 60.512, "grad_norm": 28.999738693237305, "learning_rate": 2.1977777777777777e-05, "loss": 1.0685, "step": 7564 }, { "epoch": 60.52, "grad_norm": 14.712422370910645, "learning_rate": 2.1973333333333335e-05, "loss": 1.123, "step": 7565 }, { "epoch": 60.528, "grad_norm": 18.662931442260742, "learning_rate": 2.196888888888889e-05, "loss": 0.8607, "step": 7566 }, { "epoch": 60.536, "grad_norm": 17.773141860961914, "learning_rate": 2.1964444444444445e-05, "loss": 1.0267, "step": 7567 }, { "epoch": 60.544, "grad_norm": 17.771520614624023, "learning_rate": 2.196e-05, "loss": 0.7529, "step": 7568 }, { "epoch": 60.552, "grad_norm": 20.05239486694336, "learning_rate": 2.1955555555555558e-05, "loss": 1.0069, "step": 7569 }, { "epoch": 60.56, "grad_norm": 47.927547454833984, "learning_rate": 2.195111111111111e-05, "loss": 0.9817, "step": 7570 }, { "epoch": 60.568, "grad_norm": 20.692184448242188, "learning_rate": 2.1946666666666668e-05, "loss": 0.5035, "step": 7571 }, { "epoch": 60.576, "grad_norm": 26.808134078979492, "learning_rate": 2.1942222222222223e-05, "loss": 0.8306, "step": 7572 }, { "epoch": 60.584, 
"grad_norm": 19.86953353881836, "learning_rate": 2.193777777777778e-05, "loss": 0.9862, "step": 7573 }, { "epoch": 60.592, "grad_norm": 16.863136291503906, "learning_rate": 2.1933333333333332e-05, "loss": 1.2901, "step": 7574 }, { "epoch": 60.6, "grad_norm": 22.279054641723633, "learning_rate": 2.192888888888889e-05, "loss": 0.7293, "step": 7575 }, { "epoch": 60.608, "grad_norm": 25.850690841674805, "learning_rate": 2.1924444444444445e-05, "loss": 0.9625, "step": 7576 }, { "epoch": 60.616, "grad_norm": 23.171855926513672, "learning_rate": 2.192e-05, "loss": 2.0726, "step": 7577 }, { "epoch": 60.624, "grad_norm": 18.143821716308594, "learning_rate": 2.1915555555555555e-05, "loss": 1.0703, "step": 7578 }, { "epoch": 60.632, "grad_norm": 15.781222343444824, "learning_rate": 2.1911111111111113e-05, "loss": 0.9997, "step": 7579 }, { "epoch": 60.64, "grad_norm": 22.178224563598633, "learning_rate": 2.1906666666666668e-05, "loss": 0.8324, "step": 7580 }, { "epoch": 60.648, "grad_norm": 219.13641357421875, "learning_rate": 2.1902222222222223e-05, "loss": 0.792, "step": 7581 }, { "epoch": 60.656, "grad_norm": 27.60883140563965, "learning_rate": 2.1897777777777778e-05, "loss": 1.277, "step": 7582 }, { "epoch": 60.664, "grad_norm": 39.67715835571289, "learning_rate": 2.1893333333333336e-05, "loss": 1.0678, "step": 7583 }, { "epoch": 60.672, "grad_norm": 98.23468017578125, "learning_rate": 2.188888888888889e-05, "loss": 1.4827, "step": 7584 }, { "epoch": 60.68, "grad_norm": 22.973499298095703, "learning_rate": 2.1884444444444446e-05, "loss": 1.2642, "step": 7585 }, { "epoch": 60.688, "grad_norm": 29.138538360595703, "learning_rate": 2.188e-05, "loss": 0.8283, "step": 7586 }, { "epoch": 60.696, "grad_norm": 77.33560943603516, "learning_rate": 2.187555555555556e-05, "loss": 0.8868, "step": 7587 }, { "epoch": 60.704, "grad_norm": 23.459096908569336, "learning_rate": 2.187111111111111e-05, "loss": 1.1385, "step": 7588 }, { "epoch": 60.712, "grad_norm": 22.640304565429688, 
"learning_rate": 2.186666666666667e-05, "loss": 0.574, "step": 7589 }, { "epoch": 60.72, "grad_norm": 342.58563232421875, "learning_rate": 2.1862222222222223e-05, "loss": 0.97, "step": 7590 }, { "epoch": 60.728, "grad_norm": 115.1855239868164, "learning_rate": 2.1857777777777778e-05, "loss": 0.7397, "step": 7591 }, { "epoch": 60.736, "grad_norm": 22.68876075744629, "learning_rate": 2.1853333333333333e-05, "loss": 1.3917, "step": 7592 }, { "epoch": 60.744, "grad_norm": 18.93989372253418, "learning_rate": 2.184888888888889e-05, "loss": 0.9514, "step": 7593 }, { "epoch": 60.752, "grad_norm": 42.13949203491211, "learning_rate": 2.1844444444444446e-05, "loss": 0.9517, "step": 7594 }, { "epoch": 60.76, "grad_norm": 38.1055908203125, "learning_rate": 2.184e-05, "loss": 0.9073, "step": 7595 }, { "epoch": 60.768, "grad_norm": 34.34260559082031, "learning_rate": 2.1835555555555556e-05, "loss": 1.1247, "step": 7596 }, { "epoch": 60.776, "grad_norm": 17.064544677734375, "learning_rate": 2.1831111111111114e-05, "loss": 1.0916, "step": 7597 }, { "epoch": 60.784, "grad_norm": 30.5931453704834, "learning_rate": 2.182666666666667e-05, "loss": 0.8138, "step": 7598 }, { "epoch": 60.792, "grad_norm": 19.211084365844727, "learning_rate": 2.1822222222222224e-05, "loss": 0.8488, "step": 7599 }, { "epoch": 60.8, "grad_norm": 37.398868560791016, "learning_rate": 2.181777777777778e-05, "loss": 0.7689, "step": 7600 }, { "epoch": 60.808, "grad_norm": 33.29172897338867, "learning_rate": 2.1813333333333337e-05, "loss": 1.8623, "step": 7601 }, { "epoch": 60.816, "grad_norm": 19.572738647460938, "learning_rate": 2.1808888888888888e-05, "loss": 1.1713, "step": 7602 }, { "epoch": 60.824, "grad_norm": 26.014629364013672, "learning_rate": 2.1804444444444446e-05, "loss": 0.7227, "step": 7603 }, { "epoch": 60.832, "grad_norm": 39.48439407348633, "learning_rate": 2.18e-05, "loss": 0.6516, "step": 7604 }, { "epoch": 60.84, "grad_norm": 173.09518432617188, "learning_rate": 2.179555555555556e-05, "loss": 
1.3894, "step": 7605 }, { "epoch": 60.848, "grad_norm": 92.51483917236328, "learning_rate": 2.179111111111111e-05, "loss": 0.9924, "step": 7606 }, { "epoch": 60.856, "grad_norm": 36.140750885009766, "learning_rate": 2.1786666666666666e-05, "loss": 1.2157, "step": 7607 }, { "epoch": 60.864, "grad_norm": 30.725250244140625, "learning_rate": 2.1782222222222224e-05, "loss": 0.76, "step": 7608 }, { "epoch": 60.872, "grad_norm": 33.584232330322266, "learning_rate": 2.177777777777778e-05, "loss": 0.7899, "step": 7609 }, { "epoch": 60.88, "grad_norm": 29.900663375854492, "learning_rate": 2.1773333333333334e-05, "loss": 0.8244, "step": 7610 }, { "epoch": 60.888, "grad_norm": 48.194969177246094, "learning_rate": 2.176888888888889e-05, "loss": 0.7822, "step": 7611 }, { "epoch": 60.896, "grad_norm": 24.39315414428711, "learning_rate": 2.1764444444444447e-05, "loss": 0.8599, "step": 7612 }, { "epoch": 60.904, "grad_norm": 34.625831604003906, "learning_rate": 2.176e-05, "loss": 0.77, "step": 7613 }, { "epoch": 60.912, "grad_norm": 17.743989944458008, "learning_rate": 2.1755555555555557e-05, "loss": 0.728, "step": 7614 }, { "epoch": 60.92, "grad_norm": 15.698692321777344, "learning_rate": 2.175111111111111e-05, "loss": 1.7347, "step": 7615 }, { "epoch": 60.928, "grad_norm": 24.426746368408203, "learning_rate": 2.174666666666667e-05, "loss": 0.8154, "step": 7616 }, { "epoch": 60.936, "grad_norm": 20.141929626464844, "learning_rate": 2.174222222222222e-05, "loss": 0.6522, "step": 7617 }, { "epoch": 60.944, "grad_norm": 31.07170867919922, "learning_rate": 2.173777777777778e-05, "loss": 1.4337, "step": 7618 }, { "epoch": 60.952, "grad_norm": 49.41334533691406, "learning_rate": 2.1733333333333334e-05, "loss": 0.7348, "step": 7619 }, { "epoch": 60.96, "grad_norm": 22.901634216308594, "learning_rate": 2.172888888888889e-05, "loss": 1.0839, "step": 7620 }, { "epoch": 60.968, "grad_norm": 36.77216339111328, "learning_rate": 2.1724444444444444e-05, "loss": 1.0522, "step": 7621 }, { 
"epoch": 60.976, "grad_norm": 44.452064514160156, "learning_rate": 2.1720000000000002e-05, "loss": 1.1733, "step": 7622 }, { "epoch": 60.984, "grad_norm": 39.44871520996094, "learning_rate": 2.1715555555555557e-05, "loss": 1.029, "step": 7623 }, { "epoch": 60.992, "grad_norm": 33.09893798828125, "learning_rate": 2.1711111111111112e-05, "loss": 1.8972, "step": 7624 }, { "epoch": 61.0, "grad_norm": 29.777740478515625, "learning_rate": 2.1706666666666667e-05, "loss": 1.1184, "step": 7625 }, { "epoch": 61.0, "eval_loss": 1.0411689281463623, "eval_map": 0.4115, "eval_map_50": 0.7577, "eval_map_75": 0.3825, "eval_map_Coverall": 0.6709, "eval_map_Face_Shield": 0.4304, "eval_map_Gloves": 0.3925, "eval_map_Goggles": 0.1563, "eval_map_Mask": 0.4074, "eval_map_large": 0.6284, "eval_map_medium": 0.2553, "eval_map_small": 0.2815, "eval_mar_1": 0.3255, "eval_mar_10": 0.549, "eval_mar_100": 0.5639, "eval_mar_100_Coverall": 0.7378, "eval_mar_100_Face_Shield": 0.6706, "eval_mar_100_Gloves": 0.4951, "eval_mar_100_Goggles": 0.4313, "eval_mar_100_Mask": 0.4846, "eval_mar_large": 0.7268, "eval_mar_medium": 0.4242, "eval_mar_small": 0.3623, "eval_runtime": 0.9141, "eval_samples_per_second": 31.725, "eval_steps_per_second": 2.188, "step": 7625 }, { "epoch": 61.008, "grad_norm": 124.8543472290039, "learning_rate": 2.1702222222222225e-05, "loss": 0.9316, "step": 7626 }, { "epoch": 61.016, "grad_norm": 28.517759323120117, "learning_rate": 2.1697777777777776e-05, "loss": 1.0181, "step": 7627 }, { "epoch": 61.024, "grad_norm": 29.430068969726562, "learning_rate": 2.1693333333333335e-05, "loss": 1.0486, "step": 7628 }, { "epoch": 61.032, "grad_norm": 46.44782257080078, "learning_rate": 2.168888888888889e-05, "loss": 0.9842, "step": 7629 }, { "epoch": 61.04, "grad_norm": 25.457448959350586, "learning_rate": 2.1684444444444448e-05, "loss": 1.0715, "step": 7630 }, { "epoch": 61.048, "grad_norm": 41.7519645690918, "learning_rate": 2.168e-05, "loss": 1.0051, "step": 7631 }, { "epoch": 61.056, 
"grad_norm": 45.84041213989258, "learning_rate": 2.1675555555555557e-05, "loss": 1.678, "step": 7632 }, { "epoch": 61.064, "grad_norm": 30.853593826293945, "learning_rate": 2.1671111111111112e-05, "loss": 1.2486, "step": 7633 }, { "epoch": 61.072, "grad_norm": 19.51881980895996, "learning_rate": 2.1666666666666667e-05, "loss": 1.1641, "step": 7634 }, { "epoch": 61.08, "grad_norm": 33.679115295410156, "learning_rate": 2.1662222222222222e-05, "loss": 0.9439, "step": 7635 }, { "epoch": 61.088, "grad_norm": 46.028133392333984, "learning_rate": 2.165777777777778e-05, "loss": 0.9146, "step": 7636 }, { "epoch": 61.096, "grad_norm": 38.18170928955078, "learning_rate": 2.1653333333333335e-05, "loss": 1.0728, "step": 7637 }, { "epoch": 61.104, "grad_norm": 20.413537979125977, "learning_rate": 2.164888888888889e-05, "loss": 0.7961, "step": 7638 }, { "epoch": 61.112, "grad_norm": 44.72502517700195, "learning_rate": 2.1644444444444445e-05, "loss": 0.6354, "step": 7639 }, { "epoch": 61.12, "grad_norm": 43.463294982910156, "learning_rate": 2.1640000000000003e-05, "loss": 0.6684, "step": 7640 }, { "epoch": 61.128, "grad_norm": 24.835655212402344, "learning_rate": 2.1635555555555558e-05, "loss": 0.9013, "step": 7641 }, { "epoch": 61.136, "grad_norm": 28.372278213500977, "learning_rate": 2.1631111111111113e-05, "loss": 0.9097, "step": 7642 }, { "epoch": 61.144, "grad_norm": 390.11767578125, "learning_rate": 2.1626666666666667e-05, "loss": 0.6567, "step": 7643 }, { "epoch": 61.152, "grad_norm": 77.26348114013672, "learning_rate": 2.1622222222222226e-05, "loss": 1.2134, "step": 7644 }, { "epoch": 61.16, "grad_norm": 24.789403915405273, "learning_rate": 2.1617777777777777e-05, "loss": 1.3132, "step": 7645 }, { "epoch": 61.168, "grad_norm": 23.042434692382812, "learning_rate": 2.1613333333333335e-05, "loss": 0.7554, "step": 7646 }, { "epoch": 61.176, "grad_norm": 56.108219146728516, "learning_rate": 2.160888888888889e-05, "loss": 0.9506, "step": 7647 }, { "epoch": 61.184, "grad_norm": 
18.530988693237305, "learning_rate": 2.1604444444444445e-05, "loss": 1.098, "step": 7648 }, { "epoch": 61.192, "grad_norm": 34.63721466064453, "learning_rate": 2.16e-05, "loss": 0.667, "step": 7649 }, { "epoch": 61.2, "grad_norm": 27.912826538085938, "learning_rate": 2.1595555555555558e-05, "loss": 0.6661, "step": 7650 }, { "epoch": 61.208, "grad_norm": 39.039695739746094, "learning_rate": 2.1591111111111113e-05, "loss": 0.6553, "step": 7651 }, { "epoch": 61.216, "grad_norm": 44.27607345581055, "learning_rate": 2.1586666666666668e-05, "loss": 1.2847, "step": 7652 }, { "epoch": 61.224, "grad_norm": 24.934768676757812, "learning_rate": 2.1582222222222223e-05, "loss": 1.1845, "step": 7653 }, { "epoch": 61.232, "grad_norm": 28.911989212036133, "learning_rate": 2.157777777777778e-05, "loss": 0.7705, "step": 7654 }, { "epoch": 61.24, "grad_norm": 61.79587173461914, "learning_rate": 2.1573333333333336e-05, "loss": 0.8152, "step": 7655 }, { "epoch": 61.248, "grad_norm": 21.96062660217285, "learning_rate": 2.156888888888889e-05, "loss": 0.8579, "step": 7656 }, { "epoch": 61.256, "grad_norm": 44.47627639770508, "learning_rate": 2.1564444444444445e-05, "loss": 1.0439, "step": 7657 }, { "epoch": 61.264, "grad_norm": 54.10699462890625, "learning_rate": 2.1560000000000004e-05, "loss": 0.7403, "step": 7658 }, { "epoch": 61.272, "grad_norm": 78.8151626586914, "learning_rate": 2.1555555555555555e-05, "loss": 1.2027, "step": 7659 }, { "epoch": 61.28, "grad_norm": 23.250425338745117, "learning_rate": 2.155111111111111e-05, "loss": 1.3034, "step": 7660 }, { "epoch": 61.288, "grad_norm": 32.50940704345703, "learning_rate": 2.1546666666666668e-05, "loss": 0.8269, "step": 7661 }, { "epoch": 61.296, "grad_norm": 24.382362365722656, "learning_rate": 2.1542222222222223e-05, "loss": 0.7625, "step": 7662 }, { "epoch": 61.304, "grad_norm": 19.498493194580078, "learning_rate": 2.1537777777777778e-05, "loss": 0.6187, "step": 7663 }, { "epoch": 61.312, "grad_norm": 17.975223541259766, 
"learning_rate": 2.1533333333333333e-05, "loss": 1.0977, "step": 7664 }, { "epoch": 61.32, "grad_norm": 17.658048629760742, "learning_rate": 2.152888888888889e-05, "loss": 0.8457, "step": 7665 }, { "epoch": 61.328, "grad_norm": 51.921905517578125, "learning_rate": 2.1524444444444446e-05, "loss": 0.8428, "step": 7666 }, { "epoch": 61.336, "grad_norm": 15.84300422668457, "learning_rate": 2.152e-05, "loss": 0.9, "step": 7667 }, { "epoch": 61.344, "grad_norm": 60.39474105834961, "learning_rate": 2.1515555555555555e-05, "loss": 1.0764, "step": 7668 }, { "epoch": 61.352, "grad_norm": 75.37223815917969, "learning_rate": 2.1511111111111114e-05, "loss": 0.7489, "step": 7669 }, { "epoch": 61.36, "grad_norm": 33.14938735961914, "learning_rate": 2.1506666666666665e-05, "loss": 0.9323, "step": 7670 }, { "epoch": 61.368, "grad_norm": 32.400848388671875, "learning_rate": 2.1502222222222223e-05, "loss": 0.7564, "step": 7671 }, { "epoch": 61.376, "grad_norm": 33.33446502685547, "learning_rate": 2.1497777777777778e-05, "loss": 1.1325, "step": 7672 }, { "epoch": 61.384, "grad_norm": 23.79283905029297, "learning_rate": 2.1493333333333333e-05, "loss": 0.761, "step": 7673 }, { "epoch": 61.392, "grad_norm": 39.537391662597656, "learning_rate": 2.1488888888888888e-05, "loss": 0.9212, "step": 7674 }, { "epoch": 61.4, "grad_norm": 21.658388137817383, "learning_rate": 2.1484444444444446e-05, "loss": 1.3533, "step": 7675 }, { "epoch": 61.408, "grad_norm": 21.123897552490234, "learning_rate": 2.148e-05, "loss": 1.2956, "step": 7676 }, { "epoch": 61.416, "grad_norm": 19.92024040222168, "learning_rate": 2.1475555555555556e-05, "loss": 0.8561, "step": 7677 }, { "epoch": 61.424, "grad_norm": 36.08334732055664, "learning_rate": 2.147111111111111e-05, "loss": 0.8034, "step": 7678 }, { "epoch": 61.432, "grad_norm": 26.81093406677246, "learning_rate": 2.146666666666667e-05, "loss": 1.3681, "step": 7679 }, { "epoch": 61.44, "grad_norm": 32.4589958190918, "learning_rate": 2.1462222222222224e-05, "loss": 
1.2482, "step": 7680 }, { "epoch": 61.448, "grad_norm": 34.27295684814453, "learning_rate": 2.145777777777778e-05, "loss": 0.8328, "step": 7681 }, { "epoch": 61.456, "grad_norm": 28.98644256591797, "learning_rate": 2.1453333333333333e-05, "loss": 0.7244, "step": 7682 }, { "epoch": 61.464, "grad_norm": 15.535337448120117, "learning_rate": 2.1448888888888892e-05, "loss": 0.8331, "step": 7683 }, { "epoch": 61.472, "grad_norm": 19.807769775390625, "learning_rate": 2.1444444444444443e-05, "loss": 0.9389, "step": 7684 }, { "epoch": 61.48, "grad_norm": 24.992748260498047, "learning_rate": 2.144e-05, "loss": 0.9652, "step": 7685 }, { "epoch": 61.488, "grad_norm": 24.493892669677734, "learning_rate": 2.1435555555555556e-05, "loss": 1.1811, "step": 7686 }, { "epoch": 61.496, "grad_norm": 98.27379608154297, "learning_rate": 2.1431111111111114e-05, "loss": 1.3627, "step": 7687 }, { "epoch": 61.504, "grad_norm": 38.988040924072266, "learning_rate": 2.1426666666666666e-05, "loss": 0.981, "step": 7688 }, { "epoch": 61.512, "grad_norm": 20.960933685302734, "learning_rate": 2.1422222222222224e-05, "loss": 0.9816, "step": 7689 }, { "epoch": 61.52, "grad_norm": 27.65400505065918, "learning_rate": 2.141777777777778e-05, "loss": 1.7064, "step": 7690 }, { "epoch": 61.528, "grad_norm": 26.37383270263672, "learning_rate": 2.1413333333333334e-05, "loss": 1.0315, "step": 7691 }, { "epoch": 61.536, "grad_norm": 25.104785919189453, "learning_rate": 2.140888888888889e-05, "loss": 0.8496, "step": 7692 }, { "epoch": 61.544, "grad_norm": 24.803007125854492, "learning_rate": 2.1404444444444447e-05, "loss": 0.7539, "step": 7693 }, { "epoch": 61.552, "grad_norm": 31.692115783691406, "learning_rate": 2.1400000000000002e-05, "loss": 0.7299, "step": 7694 }, { "epoch": 61.56, "grad_norm": 87.72900390625, "learning_rate": 2.1395555555555557e-05, "loss": 1.0179, "step": 7695 }, { "epoch": 61.568, "grad_norm": 30.714580535888672, "learning_rate": 2.139111111111111e-05, "loss": 0.8752, "step": 7696 }, { 
"epoch": 61.576, "grad_norm": 23.0831356048584, "learning_rate": 2.138666666666667e-05, "loss": 1.0631, "step": 7697 }, { "epoch": 61.584, "grad_norm": 29.959196090698242, "learning_rate": 2.1382222222222225e-05, "loss": 1.0846, "step": 7698 }, { "epoch": 61.592, "grad_norm": 14.463927268981934, "learning_rate": 2.137777777777778e-05, "loss": 1.1098, "step": 7699 }, { "epoch": 61.6, "grad_norm": 19.28142738342285, "learning_rate": 2.1373333333333334e-05, "loss": 1.2209, "step": 7700 }, { "epoch": 61.608, "grad_norm": 44.172630310058594, "learning_rate": 2.1368888888888892e-05, "loss": 0.8227, "step": 7701 }, { "epoch": 61.616, "grad_norm": 29.232288360595703, "learning_rate": 2.1364444444444444e-05, "loss": 0.9865, "step": 7702 }, { "epoch": 61.624, "grad_norm": 49.593109130859375, "learning_rate": 2.1360000000000002e-05, "loss": 1.1054, "step": 7703 }, { "epoch": 61.632, "grad_norm": 18.19231605529785, "learning_rate": 2.1355555555555557e-05, "loss": 0.7581, "step": 7704 }, { "epoch": 61.64, "grad_norm": 38.356605529785156, "learning_rate": 2.1351111111111112e-05, "loss": 0.8477, "step": 7705 }, { "epoch": 61.648, "grad_norm": 13.891463279724121, "learning_rate": 2.1346666666666667e-05, "loss": 0.9112, "step": 7706 }, { "epoch": 61.656, "grad_norm": 30.822586059570312, "learning_rate": 2.1342222222222225e-05, "loss": 0.7221, "step": 7707 }, { "epoch": 61.664, "grad_norm": 44.5388069152832, "learning_rate": 2.133777777777778e-05, "loss": 0.7675, "step": 7708 }, { "epoch": 61.672, "grad_norm": 17.436180114746094, "learning_rate": 2.1333333333333335e-05, "loss": 0.9339, "step": 7709 }, { "epoch": 61.68, "grad_norm": 19.912702560424805, "learning_rate": 2.132888888888889e-05, "loss": 1.1765, "step": 7710 }, { "epoch": 61.688, "grad_norm": 25.306116104125977, "learning_rate": 2.1324444444444444e-05, "loss": 0.6748, "step": 7711 }, { "epoch": 61.696, "grad_norm": 38.036861419677734, "learning_rate": 2.1320000000000003e-05, "loss": 3.4846, "step": 7712 }, { "epoch": 
61.704, "grad_norm": 15.757493019104004, "learning_rate": 2.1315555555555554e-05, "loss": 0.9513, "step": 7713 }, { "epoch": 61.712, "grad_norm": 37.8088493347168, "learning_rate": 2.1311111111111112e-05, "loss": 1.0052, "step": 7714 }, { "epoch": 61.72, "grad_norm": 40.57533645629883, "learning_rate": 2.1306666666666667e-05, "loss": 1.4978, "step": 7715 }, { "epoch": 61.728, "grad_norm": 31.03116226196289, "learning_rate": 2.1302222222222222e-05, "loss": 1.0155, "step": 7716 }, { "epoch": 61.736, "grad_norm": 82.53225708007812, "learning_rate": 2.1297777777777777e-05, "loss": 1.1205, "step": 7717 }, { "epoch": 61.744, "grad_norm": 35.62765884399414, "learning_rate": 2.1293333333333335e-05, "loss": 0.9281, "step": 7718 }, { "epoch": 61.752, "grad_norm": 46.660621643066406, "learning_rate": 2.128888888888889e-05, "loss": 0.8908, "step": 7719 }, { "epoch": 61.76, "grad_norm": 23.06438446044922, "learning_rate": 2.1284444444444445e-05, "loss": 0.9276, "step": 7720 }, { "epoch": 61.768, "grad_norm": 37.68794631958008, "learning_rate": 2.128e-05, "loss": 1.0184, "step": 7721 }, { "epoch": 61.776, "grad_norm": 54.40616989135742, "learning_rate": 2.1275555555555558e-05, "loss": 1.1766, "step": 7722 }, { "epoch": 61.784, "grad_norm": 21.632741928100586, "learning_rate": 2.1271111111111113e-05, "loss": 0.9947, "step": 7723 }, { "epoch": 61.792, "grad_norm": 31.399869918823242, "learning_rate": 2.1266666666666667e-05, "loss": 1.0283, "step": 7724 }, { "epoch": 61.8, "grad_norm": 20.391311645507812, "learning_rate": 2.1262222222222222e-05, "loss": 1.1308, "step": 7725 }, { "epoch": 61.808, "grad_norm": 50.589046478271484, "learning_rate": 2.125777777777778e-05, "loss": 0.7944, "step": 7726 }, { "epoch": 61.816, "grad_norm": 53.62118148803711, "learning_rate": 2.1253333333333332e-05, "loss": 1.0009, "step": 7727 }, { "epoch": 61.824, "grad_norm": 37.096473693847656, "learning_rate": 2.124888888888889e-05, "loss": 1.0839, "step": 7728 }, { "epoch": 61.832, "grad_norm": 
11.717815399169922, "learning_rate": 2.1244444444444445e-05, "loss": 0.7889, "step": 7729 }, { "epoch": 61.84, "grad_norm": 23.628639221191406, "learning_rate": 2.124e-05, "loss": 0.8283, "step": 7730 }, { "epoch": 61.848, "grad_norm": 43.625362396240234, "learning_rate": 2.1235555555555555e-05, "loss": 1.1261, "step": 7731 }, { "epoch": 61.856, "grad_norm": 18.311298370361328, "learning_rate": 2.1231111111111113e-05, "loss": 0.6455, "step": 7732 }, { "epoch": 61.864, "grad_norm": 31.588424682617188, "learning_rate": 2.1226666666666668e-05, "loss": 1.1321, "step": 7733 }, { "epoch": 61.872, "grad_norm": 58.183902740478516, "learning_rate": 2.1222222222222223e-05, "loss": 1.3963, "step": 7734 }, { "epoch": 61.88, "grad_norm": 107.35686492919922, "learning_rate": 2.1217777777777778e-05, "loss": 2.1233, "step": 7735 }, { "epoch": 61.888, "grad_norm": 19.01116943359375, "learning_rate": 2.1213333333333336e-05, "loss": 1.0529, "step": 7736 }, { "epoch": 61.896, "grad_norm": 16.902524948120117, "learning_rate": 2.120888888888889e-05, "loss": 0.9323, "step": 7737 }, { "epoch": 61.904, "grad_norm": 21.599184036254883, "learning_rate": 2.1204444444444445e-05, "loss": 0.9692, "step": 7738 }, { "epoch": 61.912, "grad_norm": 19.669767379760742, "learning_rate": 2.12e-05, "loss": 0.9288, "step": 7739 }, { "epoch": 61.92, "grad_norm": 35.91121292114258, "learning_rate": 2.119555555555556e-05, "loss": 1.0403, "step": 7740 }, { "epoch": 61.928, "grad_norm": 24.332069396972656, "learning_rate": 2.119111111111111e-05, "loss": 0.9896, "step": 7741 }, { "epoch": 61.936, "grad_norm": 39.91200637817383, "learning_rate": 2.1186666666666668e-05, "loss": 1.9632, "step": 7742 }, { "epoch": 61.944, "grad_norm": 90.43396759033203, "learning_rate": 2.1182222222222223e-05, "loss": 1.1932, "step": 7743 }, { "epoch": 61.952, "grad_norm": 22.51661491394043, "learning_rate": 2.117777777777778e-05, "loss": 1.2757, "step": 7744 }, { "epoch": 61.96, "grad_norm": 16.125638961791992, "learning_rate": 
2.1173333333333333e-05, "loss": 1.1955, "step": 7745 }, { "epoch": 61.968, "grad_norm": 23.282068252563477, "learning_rate": 2.116888888888889e-05, "loss": 0.8376, "step": 7746 }, { "epoch": 61.976, "grad_norm": 20.46410369873047, "learning_rate": 2.1164444444444446e-05, "loss": 1.0283, "step": 7747 }, { "epoch": 61.984, "grad_norm": 19.477758407592773, "learning_rate": 2.116e-05, "loss": 0.8207, "step": 7748 }, { "epoch": 61.992, "grad_norm": 16.564231872558594, "learning_rate": 2.1155555555555556e-05, "loss": 0.7496, "step": 7749 }, { "epoch": 62.0, "grad_norm": 110.06037139892578, "learning_rate": 2.1151111111111114e-05, "loss": 1.0368, "step": 7750 }, { "epoch": 62.0, "eval_loss": 0.9717561602592468, "eval_map": 0.4276, "eval_map_50": 0.764, "eval_map_75": 0.3977, "eval_map_Coverall": 0.6823, "eval_map_Face_Shield": 0.4798, "eval_map_Gloves": 0.3775, "eval_map_Goggles": 0.1802, "eval_map_Mask": 0.418, "eval_map_large": 0.6332, "eval_map_medium": 0.2879, "eval_map_small": 0.3363, "eval_mar_1": 0.3308, "eval_mar_10": 0.5696, "eval_mar_100": 0.5901, "eval_mar_100_Coverall": 0.7622, "eval_mar_100_Face_Shield": 0.7118, "eval_mar_100_Gloves": 0.4967, "eval_mar_100_Goggles": 0.4719, "eval_mar_100_Mask": 0.5077, "eval_mar_large": 0.7352, "eval_mar_medium": 0.4515, "eval_mar_small": 0.4209, "eval_runtime": 0.9212, "eval_samples_per_second": 31.482, "eval_steps_per_second": 2.171, "step": 7750 }, { "epoch": 62.008, "grad_norm": 314.2630310058594, "learning_rate": 2.114666666666667e-05, "loss": 0.9588, "step": 7751 }, { "epoch": 62.016, "grad_norm": 25.179763793945312, "learning_rate": 2.1142222222222223e-05, "loss": 1.1528, "step": 7752 }, { "epoch": 62.024, "grad_norm": 24.140499114990234, "learning_rate": 2.113777777777778e-05, "loss": 1.128, "step": 7753 }, { "epoch": 62.032, "grad_norm": 17.528696060180664, "learning_rate": 2.1133333333333337e-05, "loss": 0.9893, "step": 7754 }, { "epoch": 62.04, "grad_norm": 17.636167526245117, "learning_rate": 
2.112888888888889e-05, "loss": 1.1152, "step": 7755 }, { "epoch": 62.048, "grad_norm": 46.48527526855469, "learning_rate": 2.1124444444444446e-05, "loss": 1.0251, "step": 7756 }, { "epoch": 62.056, "grad_norm": 26.05591583251953, "learning_rate": 2.112e-05, "loss": 0.8192, "step": 7757 }, { "epoch": 62.064, "grad_norm": 45.70153045654297, "learning_rate": 2.111555555555556e-05, "loss": 0.9285, "step": 7758 }, { "epoch": 62.072, "grad_norm": 29.909618377685547, "learning_rate": 2.111111111111111e-05, "loss": 0.8638, "step": 7759 }, { "epoch": 62.08, "grad_norm": 26.01980972290039, "learning_rate": 2.110666666666667e-05, "loss": 1.1658, "step": 7760 }, { "epoch": 62.088, "grad_norm": 20.71295928955078, "learning_rate": 2.1102222222222224e-05, "loss": 1.0662, "step": 7761 }, { "epoch": 62.096, "grad_norm": 16.376262664794922, "learning_rate": 2.109777777777778e-05, "loss": 0.7361, "step": 7762 }, { "epoch": 62.104, "grad_norm": 17.112552642822266, "learning_rate": 2.1093333333333334e-05, "loss": 0.615, "step": 7763 }, { "epoch": 62.112, "grad_norm": 14.545815467834473, "learning_rate": 2.108888888888889e-05, "loss": 0.8843, "step": 7764 }, { "epoch": 62.12, "grad_norm": 30.39580726623535, "learning_rate": 2.1084444444444447e-05, "loss": 1.199, "step": 7765 }, { "epoch": 62.128, "grad_norm": 12.194021224975586, "learning_rate": 2.1079999999999998e-05, "loss": 0.7391, "step": 7766 }, { "epoch": 62.136, "grad_norm": 15.721214294433594, "learning_rate": 2.1075555555555556e-05, "loss": 0.6503, "step": 7767 }, { "epoch": 62.144, "grad_norm": 36.24397277832031, "learning_rate": 2.107111111111111e-05, "loss": 1.1632, "step": 7768 }, { "epoch": 62.152, "grad_norm": 39.944488525390625, "learning_rate": 2.106666666666667e-05, "loss": 1.0793, "step": 7769 }, { "epoch": 62.16, "grad_norm": 22.500961303710938, "learning_rate": 2.106222222222222e-05, "loss": 0.8707, "step": 7770 }, { "epoch": 62.168, "grad_norm": 21.789087295532227, "learning_rate": 2.105777777777778e-05, "loss": 
0.9108, "step": 7771 }, { "epoch": 62.176, "grad_norm": 403.3607482910156, "learning_rate": 2.1053333333333334e-05, "loss": 1.3117, "step": 7772 }, { "epoch": 62.184, "grad_norm": 21.363059997558594, "learning_rate": 2.104888888888889e-05, "loss": 0.8215, "step": 7773 }, { "epoch": 62.192, "grad_norm": 23.94342803955078, "learning_rate": 2.1044444444444444e-05, "loss": 0.9378, "step": 7774 }, { "epoch": 62.2, "grad_norm": 25.845491409301758, "learning_rate": 2.1040000000000002e-05, "loss": 0.8254, "step": 7775 }, { "epoch": 62.208, "grad_norm": 18.337825775146484, "learning_rate": 2.1035555555555557e-05, "loss": 0.7858, "step": 7776 }, { "epoch": 62.216, "grad_norm": 18.97481918334961, "learning_rate": 2.103111111111111e-05, "loss": 0.9824, "step": 7777 }, { "epoch": 62.224, "grad_norm": 20.64912986755371, "learning_rate": 2.1026666666666666e-05, "loss": 1.0491, "step": 7778 }, { "epoch": 62.232, "grad_norm": 38.6114501953125, "learning_rate": 2.1022222222222225e-05, "loss": 0.9487, "step": 7779 }, { "epoch": 62.24, "grad_norm": 34.23268508911133, "learning_rate": 2.101777777777778e-05, "loss": 1.1793, "step": 7780 }, { "epoch": 62.248, "grad_norm": 39.64524841308594, "learning_rate": 2.1013333333333334e-05, "loss": 1.2115, "step": 7781 }, { "epoch": 62.256, "grad_norm": 29.616727828979492, "learning_rate": 2.100888888888889e-05, "loss": 0.7371, "step": 7782 }, { "epoch": 62.264, "grad_norm": 24.682262420654297, "learning_rate": 2.1004444444444447e-05, "loss": 0.8627, "step": 7783 }, { "epoch": 62.272, "grad_norm": 34.64628219604492, "learning_rate": 2.1e-05, "loss": 0.7483, "step": 7784 }, { "epoch": 62.28, "grad_norm": 27.837013244628906, "learning_rate": 2.0995555555555557e-05, "loss": 0.6004, "step": 7785 }, { "epoch": 62.288, "grad_norm": 39.179595947265625, "learning_rate": 2.0991111111111112e-05, "loss": 1.1026, "step": 7786 }, { "epoch": 62.296, "grad_norm": 53.86969757080078, "learning_rate": 2.0986666666666667e-05, "loss": 1.3759, "step": 7787 }, { 
"epoch": 62.304, "grad_norm": 32.6583251953125, "learning_rate": 2.098222222222222e-05, "loss": 0.8142, "step": 7788 }, { "epoch": 62.312, "grad_norm": 16.7626895904541, "learning_rate": 2.097777777777778e-05, "loss": 0.7852, "step": 7789 }, { "epoch": 62.32, "grad_norm": 115.25968933105469, "learning_rate": 2.0973333333333335e-05, "loss": 3.1954, "step": 7790 }, { "epoch": 62.328, "grad_norm": 16.119239807128906, "learning_rate": 2.096888888888889e-05, "loss": 0.8371, "step": 7791 }, { "epoch": 62.336, "grad_norm": 20.79436492919922, "learning_rate": 2.0964444444444444e-05, "loss": 0.8127, "step": 7792 }, { "epoch": 62.344, "grad_norm": 23.68978500366211, "learning_rate": 2.0960000000000003e-05, "loss": 0.9011, "step": 7793 }, { "epoch": 62.352, "grad_norm": 12.746877670288086, "learning_rate": 2.0955555555555557e-05, "loss": 0.7123, "step": 7794 }, { "epoch": 62.36, "grad_norm": 17.26492691040039, "learning_rate": 2.0951111111111112e-05, "loss": 0.9748, "step": 7795 }, { "epoch": 62.368, "grad_norm": 15.599081993103027, "learning_rate": 2.0946666666666667e-05, "loss": 0.9584, "step": 7796 }, { "epoch": 62.376, "grad_norm": 32.35524368286133, "learning_rate": 2.0942222222222225e-05, "loss": 1.0727, "step": 7797 }, { "epoch": 62.384, "grad_norm": 42.6962776184082, "learning_rate": 2.0937777777777777e-05, "loss": 0.706, "step": 7798 }, { "epoch": 62.392, "grad_norm": 32.522735595703125, "learning_rate": 2.0933333333333335e-05, "loss": 0.928, "step": 7799 }, { "epoch": 62.4, "grad_norm": 36.87644958496094, "learning_rate": 2.092888888888889e-05, "loss": 1.681, "step": 7800 }, { "epoch": 62.408, "grad_norm": 22.3842830657959, "learning_rate": 2.0924444444444448e-05, "loss": 0.8507, "step": 7801 }, { "epoch": 62.416, "grad_norm": 14.340765953063965, "learning_rate": 2.092e-05, "loss": 0.7389, "step": 7802 }, { "epoch": 62.424, "grad_norm": 22.871427536010742, "learning_rate": 2.0915555555555558e-05, "loss": 0.8093, "step": 7803 }, { "epoch": 62.432, "grad_norm": 
43.064476013183594, "learning_rate": 2.0911111111111113e-05, "loss": 0.9182, "step": 7804 }, { "epoch": 62.44, "grad_norm": 29.269968032836914, "learning_rate": 2.0906666666666668e-05, "loss": 0.6766, "step": 7805 }, { "epoch": 62.448, "grad_norm": 29.370677947998047, "learning_rate": 2.0902222222222222e-05, "loss": 0.8212, "step": 7806 }, { "epoch": 62.456, "grad_norm": 75.78179931640625, "learning_rate": 2.089777777777778e-05, "loss": 0.5879, "step": 7807 }, { "epoch": 62.464, "grad_norm": 37.543338775634766, "learning_rate": 2.0893333333333335e-05, "loss": 0.8006, "step": 7808 }, { "epoch": 62.472, "grad_norm": 61.98402786254883, "learning_rate": 2.088888888888889e-05, "loss": 1.1734, "step": 7809 }, { "epoch": 62.48, "grad_norm": 29.972185134887695, "learning_rate": 2.0884444444444445e-05, "loss": 0.9267, "step": 7810 }, { "epoch": 62.488, "grad_norm": 28.2674560546875, "learning_rate": 2.0880000000000003e-05, "loss": 1.2184, "step": 7811 }, { "epoch": 62.496, "grad_norm": 19.588451385498047, "learning_rate": 2.0875555555555558e-05, "loss": 0.6391, "step": 7812 }, { "epoch": 62.504, "grad_norm": 47.7152214050293, "learning_rate": 2.087111111111111e-05, "loss": 0.897, "step": 7813 }, { "epoch": 62.512, "grad_norm": 32.49304962158203, "learning_rate": 2.0866666666666668e-05, "loss": 1.0251, "step": 7814 }, { "epoch": 62.52, "grad_norm": 16.374921798706055, "learning_rate": 2.0862222222222223e-05, "loss": 1.0084, "step": 7815 }, { "epoch": 62.528, "grad_norm": 37.032108306884766, "learning_rate": 2.0857777777777778e-05, "loss": 1.1847, "step": 7816 }, { "epoch": 62.536, "grad_norm": 13.805606842041016, "learning_rate": 2.0853333333333332e-05, "loss": 1.0207, "step": 7817 }, { "epoch": 62.544, "grad_norm": 29.94131088256836, "learning_rate": 2.084888888888889e-05, "loss": 0.9934, "step": 7818 }, { "epoch": 62.552, "grad_norm": 22.363245010375977, "learning_rate": 2.0844444444444446e-05, "loss": 1.2865, "step": 7819 }, { "epoch": 62.56, "grad_norm": 
30.375354766845703, "learning_rate": 2.084e-05, "loss": 1.1135, "step": 7820 }, { "epoch": 62.568, "grad_norm": 22.391544342041016, "learning_rate": 2.0835555555555555e-05, "loss": 0.9515, "step": 7821 }, { "epoch": 62.576, "grad_norm": 43.227542877197266, "learning_rate": 2.0831111111111113e-05, "loss": 1.3742, "step": 7822 }, { "epoch": 62.584, "grad_norm": 38.082923889160156, "learning_rate": 2.0826666666666665e-05, "loss": 0.7116, "step": 7823 }, { "epoch": 62.592, "grad_norm": 24.88717269897461, "learning_rate": 2.0822222222222223e-05, "loss": 0.985, "step": 7824 }, { "epoch": 62.6, "grad_norm": 28.652265548706055, "learning_rate": 2.0817777777777778e-05, "loss": 0.9341, "step": 7825 }, { "epoch": 62.608, "grad_norm": 36.36897277832031, "learning_rate": 2.0813333333333336e-05, "loss": 0.8378, "step": 7826 }, { "epoch": 62.616, "grad_norm": 23.609874725341797, "learning_rate": 2.0808888888888888e-05, "loss": 0.9764, "step": 7827 }, { "epoch": 62.624, "grad_norm": 22.53949546813965, "learning_rate": 2.0804444444444446e-05, "loss": 1.0169, "step": 7828 }, { "epoch": 62.632, "grad_norm": 42.18075942993164, "learning_rate": 2.08e-05, "loss": 0.6237, "step": 7829 }, { "epoch": 62.64, "grad_norm": 31.27570343017578, "learning_rate": 2.0795555555555556e-05, "loss": 2.3854, "step": 7830 }, { "epoch": 62.648, "grad_norm": 40.49319076538086, "learning_rate": 2.079111111111111e-05, "loss": 0.7918, "step": 7831 }, { "epoch": 62.656, "grad_norm": 16.851469039916992, "learning_rate": 2.078666666666667e-05, "loss": 0.7553, "step": 7832 }, { "epoch": 62.664, "grad_norm": 19.782764434814453, "learning_rate": 2.0782222222222224e-05, "loss": 0.8588, "step": 7833 }, { "epoch": 62.672, "grad_norm": 103.69627380371094, "learning_rate": 2.077777777777778e-05, "loss": 1.2586, "step": 7834 }, { "epoch": 62.68, "grad_norm": 20.51089859008789, "learning_rate": 2.0773333333333333e-05, "loss": 0.9509, "step": 7835 }, { "epoch": 62.688, "grad_norm": 23.118667602539062, "learning_rate": 
2.076888888888889e-05, "loss": 0.7343, "step": 7836 }, { "epoch": 62.696, "grad_norm": 15.217379570007324, "learning_rate": 2.0764444444444446e-05, "loss": 0.8576, "step": 7837 }, { "epoch": 62.704, "grad_norm": 36.75619125366211, "learning_rate": 2.076e-05, "loss": 0.9423, "step": 7838 }, { "epoch": 62.712, "grad_norm": 24.04735565185547, "learning_rate": 2.0755555555555556e-05, "loss": 0.7878, "step": 7839 }, { "epoch": 62.72, "grad_norm": 20.23660659790039, "learning_rate": 2.0751111111111114e-05, "loss": 1.5016, "step": 7840 }, { "epoch": 62.728, "grad_norm": 23.526025772094727, "learning_rate": 2.0746666666666666e-05, "loss": 0.6881, "step": 7841 }, { "epoch": 62.736, "grad_norm": 33.2816162109375, "learning_rate": 2.0742222222222224e-05, "loss": 1.6433, "step": 7842 }, { "epoch": 62.744, "grad_norm": 19.152687072753906, "learning_rate": 2.073777777777778e-05, "loss": 0.9997, "step": 7843 }, { "epoch": 62.752, "grad_norm": 43.670597076416016, "learning_rate": 2.0733333333333334e-05, "loss": 1.1539, "step": 7844 }, { "epoch": 62.76, "grad_norm": 34.12837600708008, "learning_rate": 2.072888888888889e-05, "loss": 0.8628, "step": 7845 }, { "epoch": 62.768, "grad_norm": 18.771095275878906, "learning_rate": 2.0724444444444447e-05, "loss": 1.03, "step": 7846 }, { "epoch": 62.776, "grad_norm": 16.078338623046875, "learning_rate": 2.072e-05, "loss": 0.8757, "step": 7847 }, { "epoch": 62.784, "grad_norm": 48.9832763671875, "learning_rate": 2.0715555555555556e-05, "loss": 1.3914, "step": 7848 }, { "epoch": 62.792, "grad_norm": 50.90598678588867, "learning_rate": 2.071111111111111e-05, "loss": 1.1864, "step": 7849 }, { "epoch": 62.8, "grad_norm": 25.321613311767578, "learning_rate": 2.070666666666667e-05, "loss": 1.1906, "step": 7850 }, { "epoch": 62.808, "grad_norm": 17.112539291381836, "learning_rate": 2.0702222222222224e-05, "loss": 1.1598, "step": 7851 }, { "epoch": 62.816, "grad_norm": 21.264436721801758, "learning_rate": 2.069777777777778e-05, "loss": 0.9345, 
"step": 7852 }, { "epoch": 62.824, "grad_norm": 31.48751449584961, "learning_rate": 2.0693333333333334e-05, "loss": 0.5988, "step": 7853 }, { "epoch": 62.832, "grad_norm": 30.658409118652344, "learning_rate": 2.0688888888888892e-05, "loss": 1.0596, "step": 7854 }, { "epoch": 62.84, "grad_norm": 24.164981842041016, "learning_rate": 2.0684444444444444e-05, "loss": 0.8563, "step": 7855 }, { "epoch": 62.848, "grad_norm": 33.89133834838867, "learning_rate": 2.0680000000000002e-05, "loss": 1.3945, "step": 7856 }, { "epoch": 62.856, "grad_norm": 19.087682723999023, "learning_rate": 2.0675555555555557e-05, "loss": 0.974, "step": 7857 }, { "epoch": 62.864, "grad_norm": 33.186832427978516, "learning_rate": 2.0671111111111115e-05, "loss": 1.3935, "step": 7858 }, { "epoch": 62.872, "grad_norm": 24.889314651489258, "learning_rate": 2.0666666666666666e-05, "loss": 0.9157, "step": 7859 }, { "epoch": 62.88, "grad_norm": 23.738759994506836, "learning_rate": 2.0662222222222225e-05, "loss": 0.7934, "step": 7860 }, { "epoch": 62.888, "grad_norm": 31.678319931030273, "learning_rate": 2.065777777777778e-05, "loss": 0.9595, "step": 7861 }, { "epoch": 62.896, "grad_norm": 18.752573013305664, "learning_rate": 2.0653333333333334e-05, "loss": 0.7754, "step": 7862 }, { "epoch": 62.904, "grad_norm": 67.52433013916016, "learning_rate": 2.064888888888889e-05, "loss": 0.6017, "step": 7863 }, { "epoch": 62.912, "grad_norm": 47.04398727416992, "learning_rate": 2.0644444444444447e-05, "loss": 1.0773, "step": 7864 }, { "epoch": 62.92, "grad_norm": 42.20500946044922, "learning_rate": 2.0640000000000002e-05, "loss": 0.565, "step": 7865 }, { "epoch": 62.928, "grad_norm": 22.61913299560547, "learning_rate": 2.0635555555555554e-05, "loss": 1.0539, "step": 7866 }, { "epoch": 62.936, "grad_norm": 13.497827529907227, "learning_rate": 2.0631111111111112e-05, "loss": 1.0666, "step": 7867 }, { "epoch": 62.944, "grad_norm": 12.688977241516113, "learning_rate": 2.0626666666666667e-05, "loss": 0.6795, "step": 7868 
}, { "epoch": 62.952, "grad_norm": 20.74944305419922, "learning_rate": 2.062222222222222e-05, "loss": 1.197, "step": 7869 }, { "epoch": 62.96, "grad_norm": 66.197509765625, "learning_rate": 2.0617777777777777e-05, "loss": 0.9501, "step": 7870 }, { "epoch": 62.968, "grad_norm": 19.14708709716797, "learning_rate": 2.0613333333333335e-05, "loss": 1.1269, "step": 7871 }, { "epoch": 62.976, "grad_norm": 21.410812377929688, "learning_rate": 2.060888888888889e-05, "loss": 1.1516, "step": 7872 }, { "epoch": 62.984, "grad_norm": 24.683530807495117, "learning_rate": 2.0604444444444444e-05, "loss": 1.7133, "step": 7873 }, { "epoch": 62.992, "grad_norm": 23.15895652770996, "learning_rate": 2.06e-05, "loss": 0.9779, "step": 7874 }, { "epoch": 63.0, "grad_norm": 25.735164642333984, "learning_rate": 2.0595555555555558e-05, "loss": 0.7737, "step": 7875 }, { "epoch": 63.0, "eval_loss": 1.0552617311477661, "eval_map": 0.4018, "eval_map_50": 0.7687, "eval_map_75": 0.3783, "eval_map_Coverall": 0.6191, "eval_map_Face_Shield": 0.451, "eval_map_Gloves": 0.3775, "eval_map_Goggles": 0.2116, "eval_map_Mask": 0.3498, "eval_map_large": 0.6167, "eval_map_medium": 0.2655, "eval_map_small": 0.2831, "eval_mar_1": 0.3123, "eval_mar_10": 0.5453, "eval_mar_100": 0.5546, "eval_mar_100_Coverall": 0.7267, "eval_mar_100_Face_Shield": 0.6824, "eval_mar_100_Gloves": 0.4869, "eval_mar_100_Goggles": 0.4156, "eval_mar_100_Mask": 0.4615, "eval_mar_large": 0.7063, "eval_mar_medium": 0.4393, "eval_mar_small": 0.3182, "eval_runtime": 0.9272, "eval_samples_per_second": 31.277, "eval_steps_per_second": 2.157, "step": 7875 }, { "epoch": 63.008, "grad_norm": 24.557880401611328, "learning_rate": 2.0591111111111112e-05, "loss": 0.9964, "step": 7876 }, { "epoch": 63.016, "grad_norm": 39.064369201660156, "learning_rate": 2.0586666666666667e-05, "loss": 1.1553, "step": 7877 }, { "epoch": 63.024, "grad_norm": 22.403575897216797, "learning_rate": 2.0582222222222222e-05, "loss": 0.9147, "step": 7878 }, { "epoch": 63.032, 
"grad_norm": 23.346498489379883, "learning_rate": 2.057777777777778e-05, "loss": 1.0996, "step": 7879 }, { "epoch": 63.04, "grad_norm": 22.653404235839844, "learning_rate": 2.0573333333333332e-05, "loss": 1.0679, "step": 7880 }, { "epoch": 63.048, "grad_norm": 35.19878005981445, "learning_rate": 2.056888888888889e-05, "loss": 0.9758, "step": 7881 }, { "epoch": 63.056, "grad_norm": 21.568326950073242, "learning_rate": 2.0564444444444445e-05, "loss": 0.9022, "step": 7882 }, { "epoch": 63.064, "grad_norm": 19.21860122680664, "learning_rate": 2.0560000000000003e-05, "loss": 1.0337, "step": 7883 }, { "epoch": 63.072, "grad_norm": 16.001998901367188, "learning_rate": 2.0555555555555555e-05, "loss": 1.9673, "step": 7884 }, { "epoch": 63.08, "grad_norm": 20.64181900024414, "learning_rate": 2.0551111111111113e-05, "loss": 0.8226, "step": 7885 }, { "epoch": 63.088, "grad_norm": 105.65385437011719, "learning_rate": 2.0546666666666668e-05, "loss": 0.6534, "step": 7886 }, { "epoch": 63.096, "grad_norm": 23.48910140991211, "learning_rate": 2.0542222222222222e-05, "loss": 1.2694, "step": 7887 }, { "epoch": 63.104, "grad_norm": 36.77647018432617, "learning_rate": 2.0537777777777777e-05, "loss": 0.9816, "step": 7888 }, { "epoch": 63.112, "grad_norm": 23.749284744262695, "learning_rate": 2.0533333333333336e-05, "loss": 0.9993, "step": 7889 }, { "epoch": 63.12, "grad_norm": 30.80189323425293, "learning_rate": 2.052888888888889e-05, "loss": 0.7281, "step": 7890 }, { "epoch": 63.128, "grad_norm": 20.292179107666016, "learning_rate": 2.0524444444444445e-05, "loss": 0.5631, "step": 7891 }, { "epoch": 63.136, "grad_norm": 13.605567932128906, "learning_rate": 2.052e-05, "loss": 1.1302, "step": 7892 }, { "epoch": 63.144, "grad_norm": 28.535829544067383, "learning_rate": 2.0515555555555558e-05, "loss": 0.8697, "step": 7893 }, { "epoch": 63.152, "grad_norm": 30.450603485107422, "learning_rate": 2.0511111111111113e-05, "loss": 1.0048, "step": 7894 }, { "epoch": 63.16, "grad_norm": 
29.1225643157959, "learning_rate": 2.0506666666666668e-05, "loss": 0.7096, "step": 7895 }, { "epoch": 63.168, "grad_norm": 48.03178787231445, "learning_rate": 2.0502222222222223e-05, "loss": 0.8396, "step": 7896 }, { "epoch": 63.176, "grad_norm": 24.152359008789062, "learning_rate": 2.049777777777778e-05, "loss": 1.1034, "step": 7897 }, { "epoch": 63.184, "grad_norm": 32.28403091430664, "learning_rate": 2.0493333333333333e-05, "loss": 0.9079, "step": 7898 }, { "epoch": 63.192, "grad_norm": 31.303091049194336, "learning_rate": 2.048888888888889e-05, "loss": 0.9603, "step": 7899 }, { "epoch": 63.2, "grad_norm": 33.68552017211914, "learning_rate": 2.0484444444444446e-05, "loss": 0.8175, "step": 7900 }, { "epoch": 63.208, "grad_norm": 24.673261642456055, "learning_rate": 2.048e-05, "loss": 2.0801, "step": 7901 }, { "epoch": 63.216, "grad_norm": 44.16215133666992, "learning_rate": 2.0475555555555555e-05, "loss": 0.6678, "step": 7902 }, { "epoch": 63.224, "grad_norm": 26.280319213867188, "learning_rate": 2.0471111111111114e-05, "loss": 0.7831, "step": 7903 }, { "epoch": 63.232, "grad_norm": 19.563358306884766, "learning_rate": 2.046666666666667e-05, "loss": 1.0577, "step": 7904 }, { "epoch": 63.24, "grad_norm": 21.874958038330078, "learning_rate": 2.0462222222222223e-05, "loss": 0.685, "step": 7905 }, { "epoch": 63.248, "grad_norm": 63.29003143310547, "learning_rate": 2.0457777777777778e-05, "loss": 1.0702, "step": 7906 }, { "epoch": 63.256, "grad_norm": 20.448665618896484, "learning_rate": 2.0453333333333336e-05, "loss": 1.2689, "step": 7907 }, { "epoch": 63.264, "grad_norm": 16.460901260375977, "learning_rate": 2.044888888888889e-05, "loss": 0.6933, "step": 7908 }, { "epoch": 63.272, "grad_norm": 14.203088760375977, "learning_rate": 2.0444444444444446e-05, "loss": 0.8087, "step": 7909 }, { "epoch": 63.28, "grad_norm": 33.46143341064453, "learning_rate": 2.044e-05, "loss": 0.9995, "step": 7910 }, { "epoch": 63.288, "grad_norm": 17.380903244018555, "learning_rate": 
2.043555555555556e-05, "loss": 1.2131, "step": 7911 }, { "epoch": 63.296, "grad_norm": 31.902997970581055, "learning_rate": 2.043111111111111e-05, "loss": 1.0154, "step": 7912 }, { "epoch": 63.304, "grad_norm": 17.243648529052734, "learning_rate": 2.042666666666667e-05, "loss": 0.9095, "step": 7913 }, { "epoch": 63.312, "grad_norm": 34.88461685180664, "learning_rate": 2.0422222222222224e-05, "loss": 1.1397, "step": 7914 }, { "epoch": 63.32, "grad_norm": 21.87165069580078, "learning_rate": 2.041777777777778e-05, "loss": 1.0184, "step": 7915 }, { "epoch": 63.328, "grad_norm": 35.20166015625, "learning_rate": 2.0413333333333333e-05, "loss": 0.8559, "step": 7916 }, { "epoch": 63.336, "grad_norm": 13.269622802734375, "learning_rate": 2.0408888888888888e-05, "loss": 1.0542, "step": 7917 }, { "epoch": 63.344, "grad_norm": 19.566469192504883, "learning_rate": 2.0404444444444446e-05, "loss": 0.7501, "step": 7918 }, { "epoch": 63.352, "grad_norm": 31.19718360900879, "learning_rate": 2.04e-05, "loss": 0.7903, "step": 7919 }, { "epoch": 63.36, "grad_norm": 37.5673713684082, "learning_rate": 2.0395555555555556e-05, "loss": 0.991, "step": 7920 }, { "epoch": 63.368, "grad_norm": 38.89330291748047, "learning_rate": 2.039111111111111e-05, "loss": 0.9869, "step": 7921 }, { "epoch": 63.376, "grad_norm": 32.94016647338867, "learning_rate": 2.038666666666667e-05, "loss": 1.0626, "step": 7922 }, { "epoch": 63.384, "grad_norm": 19.584083557128906, "learning_rate": 2.038222222222222e-05, "loss": 0.7203, "step": 7923 }, { "epoch": 63.392, "grad_norm": 30.52638816833496, "learning_rate": 2.037777777777778e-05, "loss": 0.986, "step": 7924 }, { "epoch": 63.4, "grad_norm": 30.92195701599121, "learning_rate": 2.0373333333333334e-05, "loss": 1.1646, "step": 7925 }, { "epoch": 63.408, "grad_norm": 41.509429931640625, "learning_rate": 2.036888888888889e-05, "loss": 0.7552, "step": 7926 }, { "epoch": 63.416, "grad_norm": 20.91619300842285, "learning_rate": 2.0364444444444443e-05, "loss": 0.7867, 
"step": 7927 }, { "epoch": 63.424, "grad_norm": 30.590190887451172, "learning_rate": 2.036e-05, "loss": 0.7593, "step": 7928 }, { "epoch": 63.432, "grad_norm": 42.71556854248047, "learning_rate": 2.0355555555555556e-05, "loss": 1.1787, "step": 7929 }, { "epoch": 63.44, "grad_norm": 38.081729888916016, "learning_rate": 2.035111111111111e-05, "loss": 1.046, "step": 7930 }, { "epoch": 63.448, "grad_norm": 25.74589729309082, "learning_rate": 2.0346666666666666e-05, "loss": 1.0928, "step": 7931 }, { "epoch": 63.456, "grad_norm": 15.82271957397461, "learning_rate": 2.0342222222222224e-05, "loss": 0.876, "step": 7932 }, { "epoch": 63.464, "grad_norm": 20.214982986450195, "learning_rate": 2.033777777777778e-05, "loss": 0.9218, "step": 7933 }, { "epoch": 63.472, "grad_norm": 53.23439407348633, "learning_rate": 2.0333333333333334e-05, "loss": 0.9462, "step": 7934 }, { "epoch": 63.48, "grad_norm": 281.3741760253906, "learning_rate": 2.032888888888889e-05, "loss": 0.9979, "step": 7935 }, { "epoch": 63.488, "grad_norm": 29.94713592529297, "learning_rate": 2.0324444444444447e-05, "loss": 1.0383, "step": 7936 }, { "epoch": 63.496, "grad_norm": 23.018922805786133, "learning_rate": 2.032e-05, "loss": 0.7859, "step": 7937 }, { "epoch": 63.504, "grad_norm": 25.711650848388672, "learning_rate": 2.0315555555555557e-05, "loss": 0.7776, "step": 7938 }, { "epoch": 63.512, "grad_norm": 27.815650939941406, "learning_rate": 2.031111111111111e-05, "loss": 0.7451, "step": 7939 }, { "epoch": 63.52, "grad_norm": 71.19403076171875, "learning_rate": 2.030666666666667e-05, "loss": 0.8262, "step": 7940 }, { "epoch": 63.528, "grad_norm": 22.541881561279297, "learning_rate": 2.030222222222222e-05, "loss": 1.0713, "step": 7941 }, { "epoch": 63.536, "grad_norm": 30.234817504882812, "learning_rate": 2.029777777777778e-05, "loss": 1.0365, "step": 7942 }, { "epoch": 63.544, "grad_norm": 27.9256534576416, "learning_rate": 2.0293333333333334e-05, "loss": 0.9179, "step": 7943 }, { "epoch": 63.552, 
"grad_norm": 40.308048248291016, "learning_rate": 2.028888888888889e-05, "loss": 1.0112, "step": 7944 }, { "epoch": 63.56, "grad_norm": 21.422880172729492, "learning_rate": 2.0284444444444444e-05, "loss": 0.882, "step": 7945 }, { "epoch": 63.568, "grad_norm": 32.85591506958008, "learning_rate": 2.0280000000000002e-05, "loss": 1.3503, "step": 7946 }, { "epoch": 63.576, "grad_norm": 22.39306640625, "learning_rate": 2.0275555555555557e-05, "loss": 1.0637, "step": 7947 }, { "epoch": 63.584, "grad_norm": 53.06791687011719, "learning_rate": 2.0271111111111112e-05, "loss": 1.1413, "step": 7948 }, { "epoch": 63.592, "grad_norm": 17.499061584472656, "learning_rate": 2.0266666666666667e-05, "loss": 0.7625, "step": 7949 }, { "epoch": 63.6, "grad_norm": 16.91476821899414, "learning_rate": 2.0262222222222225e-05, "loss": 0.8191, "step": 7950 }, { "epoch": 63.608, "grad_norm": 18.474056243896484, "learning_rate": 2.025777777777778e-05, "loss": 0.6829, "step": 7951 }, { "epoch": 63.616, "grad_norm": 28.313074111938477, "learning_rate": 2.0253333333333335e-05, "loss": 0.9174, "step": 7952 }, { "epoch": 63.624, "grad_norm": 644.1586303710938, "learning_rate": 2.024888888888889e-05, "loss": 1.7992, "step": 7953 }, { "epoch": 63.632, "grad_norm": 28.468530654907227, "learning_rate": 2.0244444444444448e-05, "loss": 0.9024, "step": 7954 }, { "epoch": 63.64, "grad_norm": 25.724239349365234, "learning_rate": 2.024e-05, "loss": 1.0776, "step": 7955 }, { "epoch": 63.648, "grad_norm": 15.779276847839355, "learning_rate": 2.0235555555555558e-05, "loss": 0.8406, "step": 7956 }, { "epoch": 63.656, "grad_norm": 24.472021102905273, "learning_rate": 2.0231111111111112e-05, "loss": 0.9301, "step": 7957 }, { "epoch": 63.664, "grad_norm": 15.199453353881836, "learning_rate": 2.0226666666666667e-05, "loss": 1.1232, "step": 7958 }, { "epoch": 63.672, "grad_norm": 25.54340934753418, "learning_rate": 2.0222222222222222e-05, "loss": 1.2157, "step": 7959 }, { "epoch": 63.68, "grad_norm": 
14.074874877929688, "learning_rate": 2.021777777777778e-05, "loss": 0.9505, "step": 7960 }, { "epoch": 63.688, "grad_norm": 45.56537628173828, "learning_rate": 2.0213333333333335e-05, "loss": 0.7087, "step": 7961 }, { "epoch": 63.696, "grad_norm": 81.12066650390625, "learning_rate": 2.020888888888889e-05, "loss": 1.9673, "step": 7962 }, { "epoch": 63.704, "grad_norm": 49.09004592895508, "learning_rate": 2.0204444444444445e-05, "loss": 0.7978, "step": 7963 }, { "epoch": 63.712, "grad_norm": 42.721439361572266, "learning_rate": 2.0200000000000003e-05, "loss": 1.2106, "step": 7964 }, { "epoch": 63.72, "grad_norm": 26.982620239257812, "learning_rate": 2.0195555555555558e-05, "loss": 1.2748, "step": 7965 }, { "epoch": 63.728, "grad_norm": 104.50603485107422, "learning_rate": 2.0191111111111113e-05, "loss": 0.9982, "step": 7966 }, { "epoch": 63.736, "grad_norm": 38.45545196533203, "learning_rate": 2.0186666666666668e-05, "loss": 0.9569, "step": 7967 }, { "epoch": 63.744, "grad_norm": 29.183897018432617, "learning_rate": 2.0182222222222222e-05, "loss": 1.1687, "step": 7968 }, { "epoch": 63.752, "grad_norm": 21.243730545043945, "learning_rate": 2.0177777777777777e-05, "loss": 0.7023, "step": 7969 }, { "epoch": 63.76, "grad_norm": 34.31673812866211, "learning_rate": 2.0173333333333332e-05, "loss": 1.2062, "step": 7970 }, { "epoch": 63.768, "grad_norm": 21.158042907714844, "learning_rate": 2.016888888888889e-05, "loss": 0.737, "step": 7971 }, { "epoch": 63.776, "grad_norm": 97.6087417602539, "learning_rate": 2.0164444444444445e-05, "loss": 0.9239, "step": 7972 }, { "epoch": 63.784, "grad_norm": 34.76537322998047, "learning_rate": 2.016e-05, "loss": 0.9726, "step": 7973 }, { "epoch": 63.792, "grad_norm": 34.92851638793945, "learning_rate": 2.0155555555555555e-05, "loss": 0.6793, "step": 7974 }, { "epoch": 63.8, "grad_norm": 22.767980575561523, "learning_rate": 2.0151111111111113e-05, "loss": 0.8112, "step": 7975 }, { "epoch": 63.808, "grad_norm": 16.482345581054688, 
"learning_rate": 2.0146666666666668e-05, "loss": 0.9004, "step": 7976 }, { "epoch": 63.816, "grad_norm": 21.924131393432617, "learning_rate": 2.0142222222222223e-05, "loss": 1.0068, "step": 7977 }, { "epoch": 63.824, "grad_norm": 23.125303268432617, "learning_rate": 2.0137777777777778e-05, "loss": 0.8643, "step": 7978 }, { "epoch": 63.832, "grad_norm": 22.33720588684082, "learning_rate": 2.0133333333333336e-05, "loss": 0.67, "step": 7979 }, { "epoch": 63.84, "grad_norm": 25.593719482421875, "learning_rate": 2.0128888888888887e-05, "loss": 0.775, "step": 7980 }, { "epoch": 63.848, "grad_norm": 54.60942459106445, "learning_rate": 2.0124444444444446e-05, "loss": 0.7214, "step": 7981 }, { "epoch": 63.856, "grad_norm": 26.528465270996094, "learning_rate": 2.012e-05, "loss": 1.0746, "step": 7982 }, { "epoch": 63.864, "grad_norm": 31.940580368041992, "learning_rate": 2.0115555555555555e-05, "loss": 3.2248, "step": 7983 }, { "epoch": 63.872, "grad_norm": 21.21847152709961, "learning_rate": 2.011111111111111e-05, "loss": 0.9089, "step": 7984 }, { "epoch": 63.88, "grad_norm": 17.808935165405273, "learning_rate": 2.010666666666667e-05, "loss": 1.9545, "step": 7985 }, { "epoch": 63.888, "grad_norm": 25.700820922851562, "learning_rate": 2.0102222222222223e-05, "loss": 0.8534, "step": 7986 }, { "epoch": 63.896, "grad_norm": 35.6420783996582, "learning_rate": 2.0097777777777778e-05, "loss": 0.6922, "step": 7987 }, { "epoch": 63.904, "grad_norm": 28.990299224853516, "learning_rate": 2.0093333333333333e-05, "loss": 1.2288, "step": 7988 }, { "epoch": 63.912, "grad_norm": 23.812803268432617, "learning_rate": 2.008888888888889e-05, "loss": 0.7566, "step": 7989 }, { "epoch": 63.92, "grad_norm": 22.31865882873535, "learning_rate": 2.0084444444444446e-05, "loss": 0.8278, "step": 7990 }, { "epoch": 63.928, "grad_norm": 23.090435028076172, "learning_rate": 2.008e-05, "loss": 0.9668, "step": 7991 }, { "epoch": 63.936, "grad_norm": 18.473535537719727, "learning_rate": 2.0075555555555556e-05, 
"loss": 0.6014, "step": 7992 }, { "epoch": 63.944, "grad_norm": 15.157971382141113, "learning_rate": 2.0071111111111114e-05, "loss": 0.7739, "step": 7993 }, { "epoch": 63.952, "grad_norm": 18.794984817504883, "learning_rate": 2.0066666666666665e-05, "loss": 0.9274, "step": 7994 }, { "epoch": 63.96, "grad_norm": 27.121511459350586, "learning_rate": 2.0062222222222224e-05, "loss": 0.7877, "step": 7995 }, { "epoch": 63.968, "grad_norm": 19.727380752563477, "learning_rate": 2.005777777777778e-05, "loss": 0.8718, "step": 7996 }, { "epoch": 63.976, "grad_norm": 51.03353500366211, "learning_rate": 2.0053333333333337e-05, "loss": 1.1591, "step": 7997 }, { "epoch": 63.984, "grad_norm": 23.87142562866211, "learning_rate": 2.0048888888888888e-05, "loss": 0.8458, "step": 7998 }, { "epoch": 63.992, "grad_norm": 24.872159957885742, "learning_rate": 2.0044444444444446e-05, "loss": 1.216, "step": 7999 }, { "epoch": 64.0, "grad_norm": 29.193553924560547, "learning_rate": 2.004e-05, "loss": 1.0918, "step": 8000 }, { "epoch": 64.0, "eval_loss": 1.0524173974990845, "eval_map": 0.428, "eval_map_50": 0.7734, "eval_map_75": 0.4606, "eval_map_Coverall": 0.6405, "eval_map_Face_Shield": 0.5249, "eval_map_Gloves": 0.349, "eval_map_Goggles": 0.2166, "eval_map_Mask": 0.4088, "eval_map_large": 0.6433, "eval_map_medium": 0.2975, "eval_map_small": 0.3731, "eval_mar_1": 0.3215, "eval_mar_10": 0.5717, "eval_mar_100": 0.5811, "eval_mar_100_Coverall": 0.7289, "eval_mar_100_Face_Shield": 0.7235, "eval_mar_100_Gloves": 0.4541, "eval_mar_100_Goggles": 0.4969, "eval_mar_100_Mask": 0.5019, "eval_mar_large": 0.7517, "eval_mar_medium": 0.4437, "eval_mar_small": 0.4435, "eval_runtime": 0.9323, "eval_samples_per_second": 31.106, "eval_steps_per_second": 2.145, "step": 8000 }, { "epoch": 64.008, "grad_norm": 15.728273391723633, "learning_rate": 2.0035555555555556e-05, "loss": 0.7062, "step": 8001 }, { "epoch": 64.016, "grad_norm": 25.291872024536133, "learning_rate": 2.003111111111111e-05, "loss": 0.8613, 
"step": 8002 }, { "epoch": 64.024, "grad_norm": 16.628875732421875, "learning_rate": 2.002666666666667e-05, "loss": 0.8601, "step": 8003 }, { "epoch": 64.032, "grad_norm": 37.524559020996094, "learning_rate": 2.0022222222222224e-05, "loss": 0.6917, "step": 8004 }, { "epoch": 64.04, "grad_norm": 28.64017105102539, "learning_rate": 2.001777777777778e-05, "loss": 1.2376, "step": 8005 }, { "epoch": 64.048, "grad_norm": 48.57573318481445, "learning_rate": 2.0013333333333334e-05, "loss": 0.5916, "step": 8006 }, { "epoch": 64.056, "grad_norm": 35.936031341552734, "learning_rate": 2.0008888888888892e-05, "loss": 1.1889, "step": 8007 }, { "epoch": 64.064, "grad_norm": 21.46980094909668, "learning_rate": 2.0004444444444447e-05, "loss": 0.7889, "step": 8008 }, { "epoch": 64.072, "grad_norm": 23.16191864013672, "learning_rate": 2e-05, "loss": 1.0934, "step": 8009 }, { "epoch": 64.08, "grad_norm": 121.56402587890625, "learning_rate": 1.9995555555555556e-05, "loss": 1.3342, "step": 8010 }, { "epoch": 64.088, "grad_norm": 37.142738342285156, "learning_rate": 1.9991111111111115e-05, "loss": 1.3517, "step": 8011 }, { "epoch": 64.096, "grad_norm": 36.01681900024414, "learning_rate": 1.9986666666666666e-05, "loss": 0.9164, "step": 8012 }, { "epoch": 64.104, "grad_norm": 21.75078773498535, "learning_rate": 1.9982222222222224e-05, "loss": 1.0875, "step": 8013 }, { "epoch": 64.112, "grad_norm": 52.910789489746094, "learning_rate": 1.997777777777778e-05, "loss": 1.7811, "step": 8014 }, { "epoch": 64.12, "grad_norm": 16.558713912963867, "learning_rate": 1.9973333333333334e-05, "loss": 1.135, "step": 8015 }, { "epoch": 64.128, "grad_norm": 19.714094161987305, "learning_rate": 1.996888888888889e-05, "loss": 0.9806, "step": 8016 }, { "epoch": 64.136, "grad_norm": 23.41607093811035, "learning_rate": 1.9964444444444447e-05, "loss": 0.7875, "step": 8017 }, { "epoch": 64.144, "grad_norm": 31.398834228515625, "learning_rate": 1.9960000000000002e-05, "loss": 0.9853, "step": 8018 }, { "epoch": 
64.152, "grad_norm": 33.62116622924805, "learning_rate": 1.9955555555555557e-05, "loss": 0.943, "step": 8019 }, { "epoch": 64.16, "grad_norm": 23.780776977539062, "learning_rate": 1.9951111111111112e-05, "loss": 0.7722, "step": 8020 }, { "epoch": 64.168, "grad_norm": 21.162260055541992, "learning_rate": 1.9946666666666667e-05, "loss": 1.0459, "step": 8021 }, { "epoch": 64.176, "grad_norm": 14.254731178283691, "learning_rate": 1.9942222222222225e-05, "loss": 0.892, "step": 8022 }, { "epoch": 64.184, "grad_norm": 39.703670501708984, "learning_rate": 1.9937777777777776e-05, "loss": 0.8809, "step": 8023 }, { "epoch": 64.192, "grad_norm": 25.396831512451172, "learning_rate": 1.9933333333333334e-05, "loss": 0.8654, "step": 8024 }, { "epoch": 64.2, "grad_norm": 19.50859260559082, "learning_rate": 1.992888888888889e-05, "loss": 0.9633, "step": 8025 }, { "epoch": 64.208, "grad_norm": 22.543956756591797, "learning_rate": 1.9924444444444444e-05, "loss": 0.7218, "step": 8026 }, { "epoch": 64.216, "grad_norm": 39.789947509765625, "learning_rate": 1.992e-05, "loss": 1.2045, "step": 8027 }, { "epoch": 64.224, "grad_norm": 40.84066390991211, "learning_rate": 1.9915555555555557e-05, "loss": 0.8784, "step": 8028 }, { "epoch": 64.232, "grad_norm": 27.516441345214844, "learning_rate": 1.9911111111111112e-05, "loss": 0.9229, "step": 8029 }, { "epoch": 64.24, "grad_norm": 59.5345573425293, "learning_rate": 1.9906666666666667e-05, "loss": 0.8917, "step": 8030 }, { "epoch": 64.248, "grad_norm": 29.10624122619629, "learning_rate": 1.9902222222222222e-05, "loss": 1.04, "step": 8031 }, { "epoch": 64.256, "grad_norm": 21.231481552124023, "learning_rate": 1.989777777777778e-05, "loss": 0.7867, "step": 8032 }, { "epoch": 64.264, "grad_norm": 32.995052337646484, "learning_rate": 1.9893333333333335e-05, "loss": 0.8924, "step": 8033 }, { "epoch": 64.272, "grad_norm": 18.382387161254883, "learning_rate": 1.988888888888889e-05, "loss": 0.6661, "step": 8034 }, { "epoch": 64.28, "grad_norm": 
27.986225128173828, "learning_rate": 1.9884444444444445e-05, "loss": 1.1633, "step": 8035 }, { "epoch": 64.288, "grad_norm": 18.684165954589844, "learning_rate": 1.9880000000000003e-05, "loss": 0.7658, "step": 8036 }, { "epoch": 64.296, "grad_norm": 35.17795944213867, "learning_rate": 1.9875555555555554e-05, "loss": 0.9547, "step": 8037 }, { "epoch": 64.304, "grad_norm": 24.470394134521484, "learning_rate": 1.9871111111111112e-05, "loss": 0.8501, "step": 8038 }, { "epoch": 64.312, "grad_norm": 32.6623649597168, "learning_rate": 1.9866666666666667e-05, "loss": 0.8112, "step": 8039 }, { "epoch": 64.32, "grad_norm": 304.96368408203125, "learning_rate": 1.9862222222222222e-05, "loss": 0.7095, "step": 8040 }, { "epoch": 64.328, "grad_norm": 19.067983627319336, "learning_rate": 1.9857777777777777e-05, "loss": 0.9884, "step": 8041 }, { "epoch": 64.336, "grad_norm": 24.146352767944336, "learning_rate": 1.9853333333333335e-05, "loss": 1.3758, "step": 8042 }, { "epoch": 64.344, "grad_norm": 17.508541107177734, "learning_rate": 1.984888888888889e-05, "loss": 0.9762, "step": 8043 }, { "epoch": 64.352, "grad_norm": 25.567241668701172, "learning_rate": 1.9844444444444445e-05, "loss": 1.0537, "step": 8044 }, { "epoch": 64.36, "grad_norm": 41.84922790527344, "learning_rate": 1.984e-05, "loss": 1.4387, "step": 8045 }, { "epoch": 64.368, "grad_norm": 30.269332885742188, "learning_rate": 1.9835555555555558e-05, "loss": 0.8962, "step": 8046 }, { "epoch": 64.376, "grad_norm": 47.419761657714844, "learning_rate": 1.9831111111111113e-05, "loss": 1.1646, "step": 8047 }, { "epoch": 64.384, "grad_norm": 28.326581954956055, "learning_rate": 1.9826666666666668e-05, "loss": 0.8133, "step": 8048 }, { "epoch": 64.392, "grad_norm": 12.82911491394043, "learning_rate": 1.9822222222222223e-05, "loss": 0.8806, "step": 8049 }, { "epoch": 64.4, "grad_norm": 18.427522659301758, "learning_rate": 1.981777777777778e-05, "loss": 0.799, "step": 8050 }, { "epoch": 64.408, "grad_norm": 38.42760467529297, 
"learning_rate": 1.9813333333333332e-05, "loss": 1.0397, "step": 8051 }, { "epoch": 64.416, "grad_norm": 28.694774627685547, "learning_rate": 1.980888888888889e-05, "loss": 0.6997, "step": 8052 }, { "epoch": 64.424, "grad_norm": 13.268213272094727, "learning_rate": 1.9804444444444445e-05, "loss": 0.7794, "step": 8053 }, { "epoch": 64.432, "grad_norm": 49.683834075927734, "learning_rate": 1.9800000000000004e-05, "loss": 1.3428, "step": 8054 }, { "epoch": 64.44, "grad_norm": 52.50239181518555, "learning_rate": 1.9795555555555555e-05, "loss": 1.1805, "step": 8055 }, { "epoch": 64.448, "grad_norm": 14.20164966583252, "learning_rate": 1.9791111111111113e-05, "loss": 0.7283, "step": 8056 }, { "epoch": 64.456, "grad_norm": 29.524816513061523, "learning_rate": 1.9786666666666668e-05, "loss": 0.9241, "step": 8057 }, { "epoch": 64.464, "grad_norm": 30.617149353027344, "learning_rate": 1.9782222222222223e-05, "loss": 1.3681, "step": 8058 }, { "epoch": 64.472, "grad_norm": 34.305477142333984, "learning_rate": 1.9777777777777778e-05, "loss": 1.2354, "step": 8059 }, { "epoch": 64.48, "grad_norm": 30.482486724853516, "learning_rate": 1.9773333333333336e-05, "loss": 1.123, "step": 8060 }, { "epoch": 64.488, "grad_norm": 19.831815719604492, "learning_rate": 1.976888888888889e-05, "loss": 0.8017, "step": 8061 }, { "epoch": 64.496, "grad_norm": 52.44136047363281, "learning_rate": 1.9764444444444446e-05, "loss": 1.0782, "step": 8062 }, { "epoch": 64.504, "grad_norm": 44.134063720703125, "learning_rate": 1.976e-05, "loss": 1.1713, "step": 8063 }, { "epoch": 64.512, "grad_norm": 26.844932556152344, "learning_rate": 1.975555555555556e-05, "loss": 0.9615, "step": 8064 }, { "epoch": 64.52, "grad_norm": 29.623788833618164, "learning_rate": 1.975111111111111e-05, "loss": 1.0566, "step": 8065 }, { "epoch": 64.528, "grad_norm": 18.007211685180664, "learning_rate": 1.974666666666667e-05, "loss": 0.9672, "step": 8066 }, { "epoch": 64.536, "grad_norm": 39.04624938964844, "learning_rate": 
1.9742222222222223e-05, "loss": 1.0129, "step": 8067 }, { "epoch": 64.544, "grad_norm": 24.059757232666016, "learning_rate": 1.973777777777778e-05, "loss": 1.0931, "step": 8068 }, { "epoch": 64.552, "grad_norm": 36.3431282043457, "learning_rate": 1.9733333333333333e-05, "loss": 0.929, "step": 8069 }, { "epoch": 64.56, "grad_norm": 22.344768524169922, "learning_rate": 1.972888888888889e-05, "loss": 1.0944, "step": 8070 }, { "epoch": 64.568, "grad_norm": 21.717098236083984, "learning_rate": 1.9724444444444446e-05, "loss": 0.981, "step": 8071 }, { "epoch": 64.576, "grad_norm": 39.44084167480469, "learning_rate": 1.972e-05, "loss": 1.1436, "step": 8072 }, { "epoch": 64.584, "grad_norm": 47.164302825927734, "learning_rate": 1.9715555555555556e-05, "loss": 0.9188, "step": 8073 }, { "epoch": 64.592, "grad_norm": 44.8267936706543, "learning_rate": 1.971111111111111e-05, "loss": 0.9727, "step": 8074 }, { "epoch": 64.6, "grad_norm": 20.912485122680664, "learning_rate": 1.970666666666667e-05, "loss": 1.1815, "step": 8075 }, { "epoch": 64.608, "grad_norm": 52.649959564208984, "learning_rate": 1.970222222222222e-05, "loss": 1.1477, "step": 8076 }, { "epoch": 64.616, "grad_norm": 57.33477783203125, "learning_rate": 1.969777777777778e-05, "loss": 0.753, "step": 8077 }, { "epoch": 64.624, "grad_norm": 29.785234451293945, "learning_rate": 1.9693333333333333e-05, "loss": 0.6436, "step": 8078 }, { "epoch": 64.632, "grad_norm": 33.13258743286133, "learning_rate": 1.968888888888889e-05, "loss": 1.1578, "step": 8079 }, { "epoch": 64.64, "grad_norm": 16.328208923339844, "learning_rate": 1.9684444444444443e-05, "loss": 0.9775, "step": 8080 }, { "epoch": 64.648, "grad_norm": 114.32437896728516, "learning_rate": 1.968e-05, "loss": 1.1029, "step": 8081 }, { "epoch": 64.656, "grad_norm": 14.510298728942871, "learning_rate": 1.9675555555555556e-05, "loss": 0.8224, "step": 8082 }, { "epoch": 64.664, "grad_norm": 21.329370498657227, "learning_rate": 1.967111111111111e-05, "loss": 0.8492, "step": 
8083 }, { "epoch": 64.672, "grad_norm": 21.816617965698242, "learning_rate": 1.9666666666666666e-05, "loss": 0.9174, "step": 8084 }, { "epoch": 64.68, "grad_norm": 76.98027801513672, "learning_rate": 1.9662222222222224e-05, "loss": 0.852, "step": 8085 }, { "epoch": 64.688, "grad_norm": 23.810941696166992, "learning_rate": 1.965777777777778e-05, "loss": 0.958, "step": 8086 }, { "epoch": 64.696, "grad_norm": 23.13611602783203, "learning_rate": 1.9653333333333334e-05, "loss": 1.0462, "step": 8087 }, { "epoch": 64.704, "grad_norm": 18.292827606201172, "learning_rate": 1.964888888888889e-05, "loss": 1.0159, "step": 8088 }, { "epoch": 64.712, "grad_norm": 17.085159301757812, "learning_rate": 1.9644444444444447e-05, "loss": 1.013, "step": 8089 }, { "epoch": 64.72, "grad_norm": 69.5740966796875, "learning_rate": 1.9640000000000002e-05, "loss": 1.1609, "step": 8090 }, { "epoch": 64.728, "grad_norm": 30.283164978027344, "learning_rate": 1.9635555555555557e-05, "loss": 0.9347, "step": 8091 }, { "epoch": 64.736, "grad_norm": 14.692340850830078, "learning_rate": 1.963111111111111e-05, "loss": 1.1588, "step": 8092 }, { "epoch": 64.744, "grad_norm": 28.040786743164062, "learning_rate": 1.962666666666667e-05, "loss": 0.613, "step": 8093 }, { "epoch": 64.752, "grad_norm": 21.57931137084961, "learning_rate": 1.962222222222222e-05, "loss": 0.7266, "step": 8094 }, { "epoch": 64.76, "grad_norm": 10.440711975097656, "learning_rate": 1.961777777777778e-05, "loss": 0.6436, "step": 8095 }, { "epoch": 64.768, "grad_norm": 17.353429794311523, "learning_rate": 1.9613333333333334e-05, "loss": 0.7726, "step": 8096 }, { "epoch": 64.776, "grad_norm": 20.35390853881836, "learning_rate": 1.960888888888889e-05, "loss": 0.7382, "step": 8097 }, { "epoch": 64.784, "grad_norm": 21.246566772460938, "learning_rate": 1.9604444444444444e-05, "loss": 0.7198, "step": 8098 }, { "epoch": 64.792, "grad_norm": 48.354820251464844, "learning_rate": 1.9600000000000002e-05, "loss": 0.6995, "step": 8099 }, { "epoch": 
64.8, "grad_norm": 20.498313903808594, "learning_rate": 1.9595555555555557e-05, "loss": 0.8588, "step": 8100 }, { "epoch": 64.808, "grad_norm": 55.9484748840332, "learning_rate": 1.9591111111111112e-05, "loss": 2.4877, "step": 8101 }, { "epoch": 64.816, "grad_norm": 42.8573112487793, "learning_rate": 1.9586666666666667e-05, "loss": 0.9593, "step": 8102 }, { "epoch": 64.824, "grad_norm": 23.578678131103516, "learning_rate": 1.9582222222222225e-05, "loss": 0.8133, "step": 8103 }, { "epoch": 64.832, "grad_norm": 35.506343841552734, "learning_rate": 1.957777777777778e-05, "loss": 1.1626, "step": 8104 }, { "epoch": 64.84, "grad_norm": 42.25819396972656, "learning_rate": 1.9573333333333335e-05, "loss": 1.0594, "step": 8105 }, { "epoch": 64.848, "grad_norm": 33.36237716674805, "learning_rate": 1.956888888888889e-05, "loss": 2.2558, "step": 8106 }, { "epoch": 64.856, "grad_norm": 44.109004974365234, "learning_rate": 1.9564444444444448e-05, "loss": 1.0976, "step": 8107 }, { "epoch": 64.864, "grad_norm": 36.3975715637207, "learning_rate": 1.956e-05, "loss": 0.7096, "step": 8108 }, { "epoch": 64.872, "grad_norm": 17.4405517578125, "learning_rate": 1.9555555555555557e-05, "loss": 0.7447, "step": 8109 }, { "epoch": 64.88, "grad_norm": 30.482629776000977, "learning_rate": 1.9551111111111112e-05, "loss": 1.1869, "step": 8110 }, { "epoch": 64.888, "grad_norm": 31.959985733032227, "learning_rate": 1.9546666666666667e-05, "loss": 0.7642, "step": 8111 }, { "epoch": 64.896, "grad_norm": 32.673004150390625, "learning_rate": 1.9542222222222222e-05, "loss": 0.7748, "step": 8112 }, { "epoch": 64.904, "grad_norm": 27.523595809936523, "learning_rate": 1.953777777777778e-05, "loss": 0.8619, "step": 8113 }, { "epoch": 64.912, "grad_norm": 21.62162208557129, "learning_rate": 1.9533333333333335e-05, "loss": 0.8716, "step": 8114 }, { "epoch": 64.92, "grad_norm": 36.33841323852539, "learning_rate": 1.952888888888889e-05, "loss": 2.2554, "step": 8115 }, { "epoch": 64.928, "grad_norm": 
111.13573455810547, "learning_rate": 1.9524444444444445e-05, "loss": 0.8462, "step": 8116 }, { "epoch": 64.936, "grad_norm": 44.04534149169922, "learning_rate": 1.9520000000000003e-05, "loss": 0.6814, "step": 8117 }, { "epoch": 64.944, "grad_norm": 29.744760513305664, "learning_rate": 1.9515555555555558e-05, "loss": 0.8093, "step": 8118 }, { "epoch": 64.952, "grad_norm": 85.5471420288086, "learning_rate": 1.9511111111111113e-05, "loss": 2.7983, "step": 8119 }, { "epoch": 64.96, "grad_norm": 28.206586837768555, "learning_rate": 1.9506666666666667e-05, "loss": 0.7269, "step": 8120 }, { "epoch": 64.968, "grad_norm": 39.8663444519043, "learning_rate": 1.9502222222222226e-05, "loss": 0.921, "step": 8121 }, { "epoch": 64.976, "grad_norm": 33.10395431518555, "learning_rate": 1.9497777777777777e-05, "loss": 0.7832, "step": 8122 }, { "epoch": 64.984, "grad_norm": 24.461931228637695, "learning_rate": 1.9493333333333332e-05, "loss": 0.7839, "step": 8123 }, { "epoch": 64.992, "grad_norm": 32.887977600097656, "learning_rate": 1.948888888888889e-05, "loss": 0.6894, "step": 8124 }, { "epoch": 65.0, "grad_norm": 26.274234771728516, "learning_rate": 1.9484444444444445e-05, "loss": 1.0918, "step": 8125 }, { "epoch": 65.0, "eval_loss": 1.0403342247009277, "eval_map": 0.4171, "eval_map_50": 0.7651, "eval_map_75": 0.3989, "eval_map_Coverall": 0.6226, "eval_map_Face_Shield": 0.4961, "eval_map_Gloves": 0.3685, "eval_map_Goggles": 0.1944, "eval_map_Mask": 0.4039, "eval_map_large": 0.6217, "eval_map_medium": 0.2796, "eval_map_small": 0.3478, "eval_mar_1": 0.3252, "eval_mar_10": 0.5667, "eval_mar_100": 0.5769, "eval_mar_100_Coverall": 0.7578, "eval_mar_100_Face_Shield": 0.6882, "eval_mar_100_Gloves": 0.4689, "eval_mar_100_Goggles": 0.4656, "eval_mar_100_Mask": 0.5038, "eval_mar_large": 0.7288, "eval_mar_medium": 0.4361, "eval_mar_small": 0.4053, "eval_runtime": 0.9075, "eval_samples_per_second": 31.957, "eval_steps_per_second": 2.204, "step": 8125 }, { "epoch": 65.008, "grad_norm": 
22.26410675048828, "learning_rate": 1.948e-05, "loss": 0.9213, "step": 8126 }, { "epoch": 65.016, "grad_norm": 25.17180061340332, "learning_rate": 1.9475555555555555e-05, "loss": 1.1561, "step": 8127 }, { "epoch": 65.024, "grad_norm": 16.714141845703125, "learning_rate": 1.9471111111111113e-05, "loss": 0.657, "step": 8128 }, { "epoch": 65.032, "grad_norm": 25.834516525268555, "learning_rate": 1.9466666666666668e-05, "loss": 1.2607, "step": 8129 }, { "epoch": 65.04, "grad_norm": 33.57011413574219, "learning_rate": 1.9462222222222223e-05, "loss": 1.0704, "step": 8130 }, { "epoch": 65.048, "grad_norm": 31.190885543823242, "learning_rate": 1.9457777777777777e-05, "loss": 1.1092, "step": 8131 }, { "epoch": 65.056, "grad_norm": 23.583389282226562, "learning_rate": 1.9453333333333336e-05, "loss": 0.5934, "step": 8132 }, { "epoch": 65.064, "grad_norm": 22.776477813720703, "learning_rate": 1.9448888888888887e-05, "loss": 1.5044, "step": 8133 }, { "epoch": 65.072, "grad_norm": 34.03144454956055, "learning_rate": 1.9444444444444445e-05, "loss": 1.184, "step": 8134 }, { "epoch": 65.08, "grad_norm": 52.28256607055664, "learning_rate": 1.944e-05, "loss": 1.8548, "step": 8135 }, { "epoch": 65.088, "grad_norm": 26.048723220825195, "learning_rate": 1.943555555555556e-05, "loss": 0.9779, "step": 8136 }, { "epoch": 65.096, "grad_norm": 17.73428726196289, "learning_rate": 1.943111111111111e-05, "loss": 0.5345, "step": 8137 }, { "epoch": 65.104, "grad_norm": 32.10350036621094, "learning_rate": 1.9426666666666668e-05, "loss": 0.9919, "step": 8138 }, { "epoch": 65.112, "grad_norm": 21.554981231689453, "learning_rate": 1.9422222222222223e-05, "loss": 0.9263, "step": 8139 }, { "epoch": 65.12, "grad_norm": 15.987414360046387, "learning_rate": 1.9417777777777778e-05, "loss": 0.9032, "step": 8140 }, { "epoch": 65.128, "grad_norm": 25.153980255126953, "learning_rate": 1.9413333333333333e-05, "loss": 0.5437, "step": 8141 }, { "epoch": 65.136, "grad_norm": 22.198392868041992, "learning_rate": 
1.940888888888889e-05, "loss": 1.8476, "step": 8142 }, { "epoch": 65.144, "grad_norm": 26.207439422607422, "learning_rate": 1.9404444444444446e-05, "loss": 0.9078, "step": 8143 }, { "epoch": 65.152, "grad_norm": 26.658523559570312, "learning_rate": 1.94e-05, "loss": 0.771, "step": 8144 }, { "epoch": 65.16, "grad_norm": 22.780771255493164, "learning_rate": 1.9395555555555555e-05, "loss": 0.929, "step": 8145 }, { "epoch": 65.168, "grad_norm": 12.672958374023438, "learning_rate": 1.9391111111111114e-05, "loss": 0.6458, "step": 8146 }, { "epoch": 65.176, "grad_norm": 44.06613540649414, "learning_rate": 1.938666666666667e-05, "loss": 1.0651, "step": 8147 }, { "epoch": 65.184, "grad_norm": 36.13819885253906, "learning_rate": 1.9382222222222223e-05, "loss": 0.8047, "step": 8148 }, { "epoch": 65.192, "grad_norm": 29.379987716674805, "learning_rate": 1.9377777777777778e-05, "loss": 0.8239, "step": 8149 }, { "epoch": 65.2, "grad_norm": 45.385196685791016, "learning_rate": 1.9373333333333336e-05, "loss": 1.0102, "step": 8150 }, { "epoch": 65.208, "grad_norm": 24.87386703491211, "learning_rate": 1.9368888888888888e-05, "loss": 0.7448, "step": 8151 }, { "epoch": 65.216, "grad_norm": 44.73655700683594, "learning_rate": 1.9364444444444446e-05, "loss": 0.8566, "step": 8152 }, { "epoch": 65.224, "grad_norm": 122.43297576904297, "learning_rate": 1.936e-05, "loss": 0.9245, "step": 8153 }, { "epoch": 65.232, "grad_norm": 18.156858444213867, "learning_rate": 1.9355555555555556e-05, "loss": 0.7162, "step": 8154 }, { "epoch": 65.24, "grad_norm": 23.63730239868164, "learning_rate": 1.935111111111111e-05, "loss": 0.8357, "step": 8155 }, { "epoch": 65.248, "grad_norm": 67.42018127441406, "learning_rate": 1.934666666666667e-05, "loss": 1.2998, "step": 8156 }, { "epoch": 65.256, "grad_norm": 18.678287506103516, "learning_rate": 1.9342222222222224e-05, "loss": 1.048, "step": 8157 }, { "epoch": 65.264, "grad_norm": 39.25193786621094, "learning_rate": 1.933777777777778e-05, "loss": 0.9082, 
"step": 8158 }, { "epoch": 65.272, "grad_norm": 35.03403091430664, "learning_rate": 1.9333333333333333e-05, "loss": 1.1008, "step": 8159 }, { "epoch": 65.28, "grad_norm": 18.171358108520508, "learning_rate": 1.932888888888889e-05, "loss": 1.1978, "step": 8160 }, { "epoch": 65.288, "grad_norm": 64.33841705322266, "learning_rate": 1.9324444444444447e-05, "loss": 0.9384, "step": 8161 }, { "epoch": 65.296, "grad_norm": 34.76348876953125, "learning_rate": 1.932e-05, "loss": 0.7531, "step": 8162 }, { "epoch": 65.304, "grad_norm": 27.942508697509766, "learning_rate": 1.9315555555555556e-05, "loss": 0.7076, "step": 8163 }, { "epoch": 65.312, "grad_norm": 23.961618423461914, "learning_rate": 1.9311111111111114e-05, "loss": 0.9863, "step": 8164 }, { "epoch": 65.32, "grad_norm": 44.1189079284668, "learning_rate": 1.9306666666666666e-05, "loss": 0.9046, "step": 8165 }, { "epoch": 65.328, "grad_norm": 13.928812980651855, "learning_rate": 1.9302222222222224e-05, "loss": 1.1893, "step": 8166 }, { "epoch": 65.336, "grad_norm": 29.122398376464844, "learning_rate": 1.929777777777778e-05, "loss": 1.2949, "step": 8167 }, { "epoch": 65.344, "grad_norm": 31.541296005249023, "learning_rate": 1.9293333333333334e-05, "loss": 0.8096, "step": 8168 }, { "epoch": 65.352, "grad_norm": 20.002212524414062, "learning_rate": 1.928888888888889e-05, "loss": 0.8272, "step": 8169 }, { "epoch": 65.36, "grad_norm": 22.238603591918945, "learning_rate": 1.9284444444444447e-05, "loss": 1.2137, "step": 8170 }, { "epoch": 65.368, "grad_norm": 23.486244201660156, "learning_rate": 1.9280000000000002e-05, "loss": 1.0825, "step": 8171 }, { "epoch": 65.376, "grad_norm": 26.401752471923828, "learning_rate": 1.9275555555555557e-05, "loss": 1.3548, "step": 8172 }, { "epoch": 65.384, "grad_norm": 20.544414520263672, "learning_rate": 1.927111111111111e-05, "loss": 0.6136, "step": 8173 }, { "epoch": 65.392, "grad_norm": 25.933996200561523, "learning_rate": 1.926666666666667e-05, "loss": 1.2806, "step": 8174 }, { 
"epoch": 65.4, "grad_norm": 25.805599212646484, "learning_rate": 1.9262222222222225e-05, "loss": 1.1213, "step": 8175 }, { "epoch": 65.408, "grad_norm": 24.60409164428711, "learning_rate": 1.9257777777777776e-05, "loss": 1.0907, "step": 8176 }, { "epoch": 65.416, "grad_norm": 16.691864013671875, "learning_rate": 1.9253333333333334e-05, "loss": 1.0216, "step": 8177 }, { "epoch": 65.424, "grad_norm": 13.461925506591797, "learning_rate": 1.924888888888889e-05, "loss": 0.8797, "step": 8178 }, { "epoch": 65.432, "grad_norm": 20.458755493164062, "learning_rate": 1.9244444444444444e-05, "loss": 0.9352, "step": 8179 }, { "epoch": 65.44, "grad_norm": 17.96320343017578, "learning_rate": 1.924e-05, "loss": 1.1362, "step": 8180 }, { "epoch": 65.448, "grad_norm": 54.62744903564453, "learning_rate": 1.9235555555555557e-05, "loss": 0.9189, "step": 8181 }, { "epoch": 65.456, "grad_norm": 18.273996353149414, "learning_rate": 1.9231111111111112e-05, "loss": 0.7439, "step": 8182 }, { "epoch": 65.464, "grad_norm": 48.79310607910156, "learning_rate": 1.9226666666666667e-05, "loss": 0.6776, "step": 8183 }, { "epoch": 65.472, "grad_norm": 16.375089645385742, "learning_rate": 1.922222222222222e-05, "loss": 0.7144, "step": 8184 }, { "epoch": 65.48, "grad_norm": 29.86478614807129, "learning_rate": 1.921777777777778e-05, "loss": 0.7201, "step": 8185 }, { "epoch": 65.488, "grad_norm": 30.109703063964844, "learning_rate": 1.9213333333333335e-05, "loss": 0.6459, "step": 8186 }, { "epoch": 65.496, "grad_norm": 19.922595977783203, "learning_rate": 1.920888888888889e-05, "loss": 0.6013, "step": 8187 }, { "epoch": 65.504, "grad_norm": 37.85759735107422, "learning_rate": 1.9204444444444444e-05, "loss": 0.8682, "step": 8188 }, { "epoch": 65.512, "grad_norm": 26.434499740600586, "learning_rate": 1.9200000000000003e-05, "loss": 1.0152, "step": 8189 }, { "epoch": 65.52, "grad_norm": 17.441539764404297, "learning_rate": 1.9195555555555554e-05, "loss": 0.6343, "step": 8190 }, { "epoch": 65.528, 
"grad_norm": 50.672088623046875, "learning_rate": 1.9191111111111112e-05, "loss": 1.124, "step": 8191 }, { "epoch": 65.536, "grad_norm": 17.19864273071289, "learning_rate": 1.9186666666666667e-05, "loss": 0.6074, "step": 8192 }, { "epoch": 65.544, "grad_norm": 19.563684463500977, "learning_rate": 1.9182222222222225e-05, "loss": 0.9002, "step": 8193 }, { "epoch": 65.552, "grad_norm": 32.584869384765625, "learning_rate": 1.9177777777777777e-05, "loss": 0.8296, "step": 8194 }, { "epoch": 65.56, "grad_norm": 26.919836044311523, "learning_rate": 1.9173333333333335e-05, "loss": 0.7466, "step": 8195 }, { "epoch": 65.568, "grad_norm": 17.572235107421875, "learning_rate": 1.916888888888889e-05, "loss": 0.9287, "step": 8196 }, { "epoch": 65.576, "grad_norm": 29.255813598632812, "learning_rate": 1.9164444444444445e-05, "loss": 0.8567, "step": 8197 }, { "epoch": 65.584, "grad_norm": 16.67352867126465, "learning_rate": 1.916e-05, "loss": 0.7609, "step": 8198 }, { "epoch": 65.592, "grad_norm": 19.703622817993164, "learning_rate": 1.9155555555555558e-05, "loss": 1.1502, "step": 8199 }, { "epoch": 65.6, "grad_norm": 22.076696395874023, "learning_rate": 1.9151111111111113e-05, "loss": 0.9603, "step": 8200 }, { "epoch": 65.608, "grad_norm": 23.261192321777344, "learning_rate": 1.9146666666666667e-05, "loss": 0.7313, "step": 8201 }, { "epoch": 65.616, "grad_norm": 14.578804016113281, "learning_rate": 1.9142222222222222e-05, "loss": 0.6698, "step": 8202 }, { "epoch": 65.624, "grad_norm": 30.38919448852539, "learning_rate": 1.913777777777778e-05, "loss": 1.0065, "step": 8203 }, { "epoch": 65.632, "grad_norm": 18.106138229370117, "learning_rate": 1.9133333333333332e-05, "loss": 0.8893, "step": 8204 }, { "epoch": 65.64, "grad_norm": 31.221397399902344, "learning_rate": 1.912888888888889e-05, "loss": 0.7639, "step": 8205 }, { "epoch": 65.648, "grad_norm": 52.150028228759766, "learning_rate": 1.9124444444444445e-05, "loss": 0.5426, "step": 8206 }, { "epoch": 65.656, "grad_norm": 
31.4757080078125, "learning_rate": 1.9120000000000003e-05, "loss": 1.0488, "step": 8207 }, { "epoch": 65.664, "grad_norm": 14.059999465942383, "learning_rate": 1.9115555555555555e-05, "loss": 0.6851, "step": 8208 }, { "epoch": 65.672, "grad_norm": 77.69866180419922, "learning_rate": 1.9111111111111113e-05, "loss": 1.7804, "step": 8209 }, { "epoch": 65.68, "grad_norm": 34.430938720703125, "learning_rate": 1.9106666666666668e-05, "loss": 0.8462, "step": 8210 }, { "epoch": 65.688, "grad_norm": 16.968189239501953, "learning_rate": 1.9102222222222223e-05, "loss": 1.0689, "step": 8211 }, { "epoch": 65.696, "grad_norm": 30.22893714904785, "learning_rate": 1.9097777777777778e-05, "loss": 0.9533, "step": 8212 }, { "epoch": 65.704, "grad_norm": 19.830013275146484, "learning_rate": 1.9093333333333336e-05, "loss": 0.9434, "step": 8213 }, { "epoch": 65.712, "grad_norm": 32.28729248046875, "learning_rate": 1.908888888888889e-05, "loss": 1.0988, "step": 8214 }, { "epoch": 65.72, "grad_norm": 37.06406784057617, "learning_rate": 1.9084444444444445e-05, "loss": 1.0212, "step": 8215 }, { "epoch": 65.728, "grad_norm": 19.712541580200195, "learning_rate": 1.908e-05, "loss": 0.9267, "step": 8216 }, { "epoch": 65.736, "grad_norm": 31.65726661682129, "learning_rate": 1.907555555555556e-05, "loss": 0.8763, "step": 8217 }, { "epoch": 65.744, "grad_norm": 38.42625427246094, "learning_rate": 1.9071111111111113e-05, "loss": 0.937, "step": 8218 }, { "epoch": 65.752, "grad_norm": 13.757843971252441, "learning_rate": 1.9066666666666668e-05, "loss": 1.0847, "step": 8219 }, { "epoch": 65.76, "grad_norm": 139.26751708984375, "learning_rate": 1.9062222222222223e-05, "loss": 0.7126, "step": 8220 }, { "epoch": 65.768, "grad_norm": 25.224546432495117, "learning_rate": 1.905777777777778e-05, "loss": 0.9467, "step": 8221 }, { "epoch": 65.776, "grad_norm": 21.078601837158203, "learning_rate": 1.9053333333333333e-05, "loss": 0.9159, "step": 8222 }, { "epoch": 65.784, "grad_norm": 21.16272735595703, 
"learning_rate": 1.904888888888889e-05, "loss": 0.6093, "step": 8223 }, { "epoch": 65.792, "grad_norm": 81.11197662353516, "learning_rate": 1.9044444444444446e-05, "loss": 0.7766, "step": 8224 }, { "epoch": 65.8, "grad_norm": 21.430744171142578, "learning_rate": 1.904e-05, "loss": 0.9813, "step": 8225 }, { "epoch": 65.808, "grad_norm": 39.91489028930664, "learning_rate": 1.9035555555555556e-05, "loss": 1.8261, "step": 8226 }, { "epoch": 65.816, "grad_norm": 30.18601417541504, "learning_rate": 1.903111111111111e-05, "loss": 0.7801, "step": 8227 }, { "epoch": 65.824, "grad_norm": 43.78940963745117, "learning_rate": 1.902666666666667e-05, "loss": 1.4721, "step": 8228 }, { "epoch": 65.832, "grad_norm": 27.166566848754883, "learning_rate": 1.9022222222222223e-05, "loss": 1.0368, "step": 8229 }, { "epoch": 65.84, "grad_norm": 41.157203674316406, "learning_rate": 1.9017777777777778e-05, "loss": 1.0663, "step": 8230 }, { "epoch": 65.848, "grad_norm": 34.25742721557617, "learning_rate": 1.9013333333333333e-05, "loss": 3.1525, "step": 8231 }, { "epoch": 65.856, "grad_norm": 64.59375, "learning_rate": 1.900888888888889e-05, "loss": 0.734, "step": 8232 }, { "epoch": 65.864, "grad_norm": 27.3131046295166, "learning_rate": 1.9004444444444443e-05, "loss": 0.6529, "step": 8233 }, { "epoch": 65.872, "grad_norm": 20.208921432495117, "learning_rate": 1.9e-05, "loss": 0.7512, "step": 8234 }, { "epoch": 65.88, "grad_norm": 19.386791229248047, "learning_rate": 1.8995555555555556e-05, "loss": 1.18, "step": 8235 }, { "epoch": 65.888, "grad_norm": 15.637340545654297, "learning_rate": 1.899111111111111e-05, "loss": 0.9764, "step": 8236 }, { "epoch": 65.896, "grad_norm": 19.81922149658203, "learning_rate": 1.8986666666666666e-05, "loss": 1.1562, "step": 8237 }, { "epoch": 65.904, "grad_norm": 57.35364532470703, "learning_rate": 1.8982222222222224e-05, "loss": 0.7342, "step": 8238 }, { "epoch": 65.912, "grad_norm": 21.94710922241211, "learning_rate": 1.897777777777778e-05, "loss": 1.0926, 
"step": 8239 }, { "epoch": 65.92, "grad_norm": 27.49052619934082, "learning_rate": 1.8973333333333334e-05, "loss": 1.9813, "step": 8240 }, { "epoch": 65.928, "grad_norm": 26.52089500427246, "learning_rate": 1.896888888888889e-05, "loss": 0.7988, "step": 8241 }, { "epoch": 65.936, "grad_norm": 20.073707580566406, "learning_rate": 1.8964444444444447e-05, "loss": 0.8256, "step": 8242 }, { "epoch": 65.944, "grad_norm": 33.996341705322266, "learning_rate": 1.896e-05, "loss": 1.125, "step": 8243 }, { "epoch": 65.952, "grad_norm": 18.616870880126953, "learning_rate": 1.8955555555555556e-05, "loss": 0.7555, "step": 8244 }, { "epoch": 65.96, "grad_norm": 26.6098690032959, "learning_rate": 1.895111111111111e-05, "loss": 0.6478, "step": 8245 }, { "epoch": 65.968, "grad_norm": 32.136878967285156, "learning_rate": 1.894666666666667e-05, "loss": 0.9077, "step": 8246 }, { "epoch": 65.976, "grad_norm": 24.2178897857666, "learning_rate": 1.894222222222222e-05, "loss": 0.7228, "step": 8247 }, { "epoch": 65.984, "grad_norm": 23.710498809814453, "learning_rate": 1.893777777777778e-05, "loss": 0.674, "step": 8248 }, { "epoch": 65.992, "grad_norm": 27.19399642944336, "learning_rate": 1.8933333333333334e-05, "loss": 0.6811, "step": 8249 }, { "epoch": 66.0, "grad_norm": 225.1502685546875, "learning_rate": 1.8928888888888892e-05, "loss": 0.6499, "step": 8250 }, { "epoch": 66.0, "eval_loss": 0.9931555390357971, "eval_map": 0.4324, "eval_map_50": 0.7927, "eval_map_75": 0.4014, "eval_map_Coverall": 0.6024, "eval_map_Face_Shield": 0.5159, "eval_map_Gloves": 0.3962, "eval_map_Goggles": 0.2204, "eval_map_Mask": 0.4268, "eval_map_large": 0.6229, "eval_map_medium": 0.3083, "eval_map_small": 0.3445, "eval_mar_1": 0.3292, "eval_mar_10": 0.574, "eval_mar_100": 0.5897, "eval_mar_100_Coverall": 0.7267, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.4984, "eval_mar_100_Goggles": 0.4906, "eval_mar_100_Mask": 0.5154, "eval_mar_large": 0.7343, "eval_mar_medium": 0.4524, "eval_mar_small": 
0.4106, "eval_runtime": 0.934, "eval_samples_per_second": 31.05, "eval_steps_per_second": 2.141, "step": 8250 }, { "epoch": 66.008, "grad_norm": 53.111656188964844, "learning_rate": 1.8924444444444444e-05, "loss": 0.778, "step": 8251 }, { "epoch": 66.016, "grad_norm": 32.483341217041016, "learning_rate": 1.8920000000000002e-05, "loss": 1.594, "step": 8252 }, { "epoch": 66.024, "grad_norm": 22.4739990234375, "learning_rate": 1.8915555555555557e-05, "loss": 1.013, "step": 8253 }, { "epoch": 66.032, "grad_norm": 20.268632888793945, "learning_rate": 1.891111111111111e-05, "loss": 1.1567, "step": 8254 }, { "epoch": 66.04, "grad_norm": 16.818845748901367, "learning_rate": 1.8906666666666666e-05, "loss": 0.7371, "step": 8255 }, { "epoch": 66.048, "grad_norm": 23.264144897460938, "learning_rate": 1.8902222222222225e-05, "loss": 1.1888, "step": 8256 }, { "epoch": 66.056, "grad_norm": 18.67896270751953, "learning_rate": 1.889777777777778e-05, "loss": 0.6865, "step": 8257 }, { "epoch": 66.064, "grad_norm": 37.49720764160156, "learning_rate": 1.8893333333333334e-05, "loss": 1.1199, "step": 8258 }, { "epoch": 66.072, "grad_norm": 20.470781326293945, "learning_rate": 1.888888888888889e-05, "loss": 2.2784, "step": 8259 }, { "epoch": 66.08, "grad_norm": 45.20446014404297, "learning_rate": 1.8884444444444447e-05, "loss": 1.0916, "step": 8260 }, { "epoch": 66.088, "grad_norm": 26.89742660522461, "learning_rate": 1.888e-05, "loss": 0.7639, "step": 8261 }, { "epoch": 66.096, "grad_norm": 42.462867736816406, "learning_rate": 1.8875555555555557e-05, "loss": 0.8606, "step": 8262 }, { "epoch": 66.104, "grad_norm": 51.067291259765625, "learning_rate": 1.8871111111111112e-05, "loss": 1.544, "step": 8263 }, { "epoch": 66.112, "grad_norm": 31.183820724487305, "learning_rate": 1.886666666666667e-05, "loss": 0.8929, "step": 8264 }, { "epoch": 66.12, "grad_norm": 19.22953224182129, "learning_rate": 1.886222222222222e-05, "loss": 0.9718, "step": 8265 }, { "epoch": 66.128, "grad_norm": 
31.009239196777344, "learning_rate": 1.885777777777778e-05, "loss": 1.1537, "step": 8266 }, { "epoch": 66.136, "grad_norm": 25.933250427246094, "learning_rate": 1.8853333333333335e-05, "loss": 1.105, "step": 8267 }, { "epoch": 66.144, "grad_norm": 26.358718872070312, "learning_rate": 1.884888888888889e-05, "loss": 0.8861, "step": 8268 }, { "epoch": 66.152, "grad_norm": 24.58597183227539, "learning_rate": 1.8844444444444444e-05, "loss": 0.8771, "step": 8269 }, { "epoch": 66.16, "grad_norm": 24.707080841064453, "learning_rate": 1.8840000000000003e-05, "loss": 0.8191, "step": 8270 }, { "epoch": 66.168, "grad_norm": 26.728103637695312, "learning_rate": 1.8835555555555557e-05, "loss": 0.7558, "step": 8271 }, { "epoch": 66.176, "grad_norm": 31.770544052124023, "learning_rate": 1.8831111111111112e-05, "loss": 0.9054, "step": 8272 }, { "epoch": 66.184, "grad_norm": 14.933594703674316, "learning_rate": 1.8826666666666667e-05, "loss": 0.7198, "step": 8273 }, { "epoch": 66.192, "grad_norm": 14.886249542236328, "learning_rate": 1.8822222222222225e-05, "loss": 1.0885, "step": 8274 }, { "epoch": 66.2, "grad_norm": 32.72271728515625, "learning_rate": 1.881777777777778e-05, "loss": 0.7772, "step": 8275 }, { "epoch": 66.208, "grad_norm": 27.800304412841797, "learning_rate": 1.8813333333333335e-05, "loss": 0.7094, "step": 8276 }, { "epoch": 66.216, "grad_norm": 29.242765426635742, "learning_rate": 1.880888888888889e-05, "loss": 0.8431, "step": 8277 }, { "epoch": 66.224, "grad_norm": 25.574260711669922, "learning_rate": 1.8804444444444445e-05, "loss": 0.7921, "step": 8278 }, { "epoch": 66.232, "grad_norm": 19.290470123291016, "learning_rate": 1.88e-05, "loss": 0.9035, "step": 8279 }, { "epoch": 66.24, "grad_norm": 60.54963684082031, "learning_rate": 1.8795555555555554e-05, "loss": 1.0931, "step": 8280 }, { "epoch": 66.248, "grad_norm": 22.439586639404297, "learning_rate": 1.8791111111111113e-05, "loss": 0.9204, "step": 8281 }, { "epoch": 66.256, "grad_norm": 29.713533401489258, 
"learning_rate": 1.8786666666666667e-05, "loss": 1.0633, "step": 8282 }, { "epoch": 66.264, "grad_norm": 147.6241455078125, "learning_rate": 1.8782222222222222e-05, "loss": 0.8662, "step": 8283 }, { "epoch": 66.272, "grad_norm": 28.404855728149414, "learning_rate": 1.8777777777777777e-05, "loss": 0.807, "step": 8284 }, { "epoch": 66.28, "grad_norm": 31.46180534362793, "learning_rate": 1.8773333333333335e-05, "loss": 0.9645, "step": 8285 }, { "epoch": 66.288, "grad_norm": 16.072772979736328, "learning_rate": 1.876888888888889e-05, "loss": 0.9208, "step": 8286 }, { "epoch": 66.296, "grad_norm": 20.915102005004883, "learning_rate": 1.8764444444444445e-05, "loss": 0.9205, "step": 8287 }, { "epoch": 66.304, "grad_norm": 21.253141403198242, "learning_rate": 1.876e-05, "loss": 0.8685, "step": 8288 }, { "epoch": 66.312, "grad_norm": 72.23275756835938, "learning_rate": 1.8755555555555558e-05, "loss": 0.8208, "step": 8289 }, { "epoch": 66.32, "grad_norm": 46.41697692871094, "learning_rate": 1.875111111111111e-05, "loss": 0.7663, "step": 8290 }, { "epoch": 66.328, "grad_norm": 18.516265869140625, "learning_rate": 1.8746666666666668e-05, "loss": 1.6416, "step": 8291 }, { "epoch": 66.336, "grad_norm": 41.29497528076172, "learning_rate": 1.8742222222222223e-05, "loss": 1.2048, "step": 8292 }, { "epoch": 66.344, "grad_norm": 38.88628387451172, "learning_rate": 1.8737777777777778e-05, "loss": 1.0264, "step": 8293 }, { "epoch": 66.352, "grad_norm": 32.25786590576172, "learning_rate": 1.8733333333333332e-05, "loss": 1.4116, "step": 8294 }, { "epoch": 66.36, "grad_norm": 38.32638168334961, "learning_rate": 1.872888888888889e-05, "loss": 1.2129, "step": 8295 }, { "epoch": 66.368, "grad_norm": 29.89151382446289, "learning_rate": 1.8724444444444445e-05, "loss": 0.6028, "step": 8296 }, { "epoch": 66.376, "grad_norm": 41.120147705078125, "learning_rate": 1.872e-05, "loss": 0.7752, "step": 8297 }, { "epoch": 66.384, "grad_norm": 29.868192672729492, "learning_rate": 1.8715555555555555e-05, 
"loss": 0.7051, "step": 8298 }, { "epoch": 66.392, "grad_norm": 20.72966766357422, "learning_rate": 1.8711111111111113e-05, "loss": 1.0146, "step": 8299 }, { "epoch": 66.4, "grad_norm": 19.55097198486328, "learning_rate": 1.8706666666666668e-05, "loss": 0.8273, "step": 8300 }, { "epoch": 66.408, "grad_norm": 24.452499389648438, "learning_rate": 1.8702222222222223e-05, "loss": 0.8559, "step": 8301 }, { "epoch": 66.416, "grad_norm": 14.336222648620605, "learning_rate": 1.8697777777777778e-05, "loss": 0.9175, "step": 8302 }, { "epoch": 66.424, "grad_norm": 37.256229400634766, "learning_rate": 1.8693333333333336e-05, "loss": 0.7359, "step": 8303 }, { "epoch": 66.432, "grad_norm": 18.09374237060547, "learning_rate": 1.8688888888888888e-05, "loss": 0.9871, "step": 8304 }, { "epoch": 66.44, "grad_norm": 23.96854019165039, "learning_rate": 1.8684444444444446e-05, "loss": 0.8504, "step": 8305 }, { "epoch": 66.448, "grad_norm": 43.54706954956055, "learning_rate": 1.868e-05, "loss": 1.0303, "step": 8306 }, { "epoch": 66.456, "grad_norm": 13.656111717224121, "learning_rate": 1.8675555555555556e-05, "loss": 1.393, "step": 8307 }, { "epoch": 66.464, "grad_norm": 33.87677001953125, "learning_rate": 1.867111111111111e-05, "loss": 0.9899, "step": 8308 }, { "epoch": 66.472, "grad_norm": 14.892839431762695, "learning_rate": 1.866666666666667e-05, "loss": 1.3281, "step": 8309 }, { "epoch": 66.48, "grad_norm": 31.136693954467773, "learning_rate": 1.8662222222222223e-05, "loss": 0.6573, "step": 8310 }, { "epoch": 66.488, "grad_norm": 70.22965240478516, "learning_rate": 1.865777777777778e-05, "loss": 0.9724, "step": 8311 }, { "epoch": 66.496, "grad_norm": 29.199804306030273, "learning_rate": 1.8653333333333333e-05, "loss": 1.9486, "step": 8312 }, { "epoch": 66.504, "grad_norm": 28.28957748413086, "learning_rate": 1.864888888888889e-05, "loss": 1.0973, "step": 8313 }, { "epoch": 66.512, "grad_norm": 30.4561710357666, "learning_rate": 1.8644444444444446e-05, "loss": 0.972, "step": 8314 }, 
{ "epoch": 66.52, "grad_norm": 26.537370681762695, "learning_rate": 1.864e-05, "loss": 0.5723, "step": 8315 }, { "epoch": 66.528, "grad_norm": 23.80327033996582, "learning_rate": 1.8635555555555556e-05, "loss": 0.8932, "step": 8316 }, { "epoch": 66.536, "grad_norm": 35.354949951171875, "learning_rate": 1.8631111111111114e-05, "loss": 0.9384, "step": 8317 }, { "epoch": 66.544, "grad_norm": 28.238733291625977, "learning_rate": 1.8626666666666666e-05, "loss": 0.7975, "step": 8318 }, { "epoch": 66.552, "grad_norm": 82.34473419189453, "learning_rate": 1.8622222222222224e-05, "loss": 3.0225, "step": 8319 }, { "epoch": 66.56, "grad_norm": 34.31487274169922, "learning_rate": 1.861777777777778e-05, "loss": 0.9169, "step": 8320 }, { "epoch": 66.568, "grad_norm": 19.90378761291504, "learning_rate": 1.8613333333333337e-05, "loss": 0.7936, "step": 8321 }, { "epoch": 66.576, "grad_norm": 152.11512756347656, "learning_rate": 1.860888888888889e-05, "loss": 0.7464, "step": 8322 }, { "epoch": 66.584, "grad_norm": 15.754334449768066, "learning_rate": 1.8604444444444447e-05, "loss": 0.9348, "step": 8323 }, { "epoch": 66.592, "grad_norm": 30.542512893676758, "learning_rate": 1.86e-05, "loss": 0.8695, "step": 8324 }, { "epoch": 66.6, "grad_norm": 20.138893127441406, "learning_rate": 1.8595555555555556e-05, "loss": 0.8, "step": 8325 }, { "epoch": 66.608, "grad_norm": 25.057235717773438, "learning_rate": 1.859111111111111e-05, "loss": 0.9312, "step": 8326 }, { "epoch": 66.616, "grad_norm": 20.890188217163086, "learning_rate": 1.858666666666667e-05, "loss": 0.9357, "step": 8327 }, { "epoch": 66.624, "grad_norm": 62.64714813232422, "learning_rate": 1.8582222222222224e-05, "loss": 1.1151, "step": 8328 }, { "epoch": 66.632, "grad_norm": 17.58316993713379, "learning_rate": 1.8577777777777776e-05, "loss": 1.0482, "step": 8329 }, { "epoch": 66.64, "grad_norm": 23.55628204345703, "learning_rate": 1.8573333333333334e-05, "loss": 0.8347, "step": 8330 }, { "epoch": 66.648, "grad_norm": 
128.70315551757812, "learning_rate": 1.856888888888889e-05, "loss": 0.9371, "step": 8331 }, { "epoch": 66.656, "grad_norm": 83.8910903930664, "learning_rate": 1.8564444444444447e-05, "loss": 1.4277, "step": 8332 }, { "epoch": 66.664, "grad_norm": 24.71268081665039, "learning_rate": 1.856e-05, "loss": 0.6657, "step": 8333 }, { "epoch": 66.672, "grad_norm": 39.738277435302734, "learning_rate": 1.8555555555555557e-05, "loss": 1.6978, "step": 8334 }, { "epoch": 66.68, "grad_norm": 79.80862426757812, "learning_rate": 1.855111111111111e-05, "loss": 0.6321, "step": 8335 }, { "epoch": 66.688, "grad_norm": 24.099815368652344, "learning_rate": 1.8546666666666666e-05, "loss": 0.7473, "step": 8336 }, { "epoch": 66.696, "grad_norm": 72.76426696777344, "learning_rate": 1.854222222222222e-05, "loss": 1.1577, "step": 8337 }, { "epoch": 66.704, "grad_norm": 46.356929779052734, "learning_rate": 1.853777777777778e-05, "loss": 1.2116, "step": 8338 }, { "epoch": 66.712, "grad_norm": 58.77454376220703, "learning_rate": 1.8533333333333334e-05, "loss": 1.7001, "step": 8339 }, { "epoch": 66.72, "grad_norm": 32.39127731323242, "learning_rate": 1.852888888888889e-05, "loss": 1.031, "step": 8340 }, { "epoch": 66.728, "grad_norm": 22.649864196777344, "learning_rate": 1.8524444444444444e-05, "loss": 0.7899, "step": 8341 }, { "epoch": 66.736, "grad_norm": 22.529111862182617, "learning_rate": 1.8520000000000002e-05, "loss": 0.6774, "step": 8342 }, { "epoch": 66.744, "grad_norm": 17.286340713500977, "learning_rate": 1.8515555555555557e-05, "loss": 0.8011, "step": 8343 }, { "epoch": 66.752, "grad_norm": 16.786226272583008, "learning_rate": 1.8511111111111112e-05, "loss": 0.8111, "step": 8344 }, { "epoch": 66.76, "grad_norm": 23.827838897705078, "learning_rate": 1.8506666666666667e-05, "loss": 0.9957, "step": 8345 }, { "epoch": 66.768, "grad_norm": 20.340496063232422, "learning_rate": 1.8502222222222225e-05, "loss": 0.944, "step": 8346 }, { "epoch": 66.776, "grad_norm": 22.535015106201172, 
"learning_rate": 1.8497777777777776e-05, "loss": 0.6828, "step": 8347 }, { "epoch": 66.784, "grad_norm": 26.40909767150879, "learning_rate": 1.8493333333333335e-05, "loss": 0.8449, "step": 8348 }, { "epoch": 66.792, "grad_norm": 31.410295486450195, "learning_rate": 1.848888888888889e-05, "loss": 0.6419, "step": 8349 }, { "epoch": 66.8, "grad_norm": 11.754594802856445, "learning_rate": 1.8484444444444444e-05, "loss": 0.7282, "step": 8350 }, { "epoch": 66.808, "grad_norm": 60.969722747802734, "learning_rate": 1.848e-05, "loss": 0.9802, "step": 8351 }, { "epoch": 66.816, "grad_norm": 27.828563690185547, "learning_rate": 1.8475555555555557e-05, "loss": 1.048, "step": 8352 }, { "epoch": 66.824, "grad_norm": 30.282724380493164, "learning_rate": 1.8471111111111112e-05, "loss": 0.9379, "step": 8353 }, { "epoch": 66.832, "grad_norm": 18.814197540283203, "learning_rate": 1.8466666666666667e-05, "loss": 1.1224, "step": 8354 }, { "epoch": 66.84, "grad_norm": 20.793785095214844, "learning_rate": 1.8462222222222222e-05, "loss": 0.8261, "step": 8355 }, { "epoch": 66.848, "grad_norm": 23.664934158325195, "learning_rate": 1.845777777777778e-05, "loss": 1.1821, "step": 8356 }, { "epoch": 66.856, "grad_norm": 23.496667861938477, "learning_rate": 1.8453333333333335e-05, "loss": 0.9643, "step": 8357 }, { "epoch": 66.864, "grad_norm": 29.013700485229492, "learning_rate": 1.844888888888889e-05, "loss": 1.1792, "step": 8358 }, { "epoch": 66.872, "grad_norm": 62.027130126953125, "learning_rate": 1.8444444444444445e-05, "loss": 0.5263, "step": 8359 }, { "epoch": 66.88, "grad_norm": 20.49076271057129, "learning_rate": 1.8440000000000003e-05, "loss": 0.9996, "step": 8360 }, { "epoch": 66.888, "grad_norm": 33.275421142578125, "learning_rate": 1.8435555555555554e-05, "loss": 0.6483, "step": 8361 }, { "epoch": 66.896, "grad_norm": 29.2447509765625, "learning_rate": 1.8431111111111113e-05, "loss": 0.6765, "step": 8362 }, { "epoch": 66.904, "grad_norm": 17.599191665649414, "learning_rate": 
1.8426666666666668e-05, "loss": 0.7855, "step": 8363 }, { "epoch": 66.912, "grad_norm": 61.22190856933594, "learning_rate": 1.8422222222222222e-05, "loss": 0.7765, "step": 8364 }, { "epoch": 66.92, "grad_norm": 15.873064994812012, "learning_rate": 1.8417777777777777e-05, "loss": 0.678, "step": 8365 }, { "epoch": 66.928, "grad_norm": 287.6834716796875, "learning_rate": 1.8413333333333335e-05, "loss": 0.9093, "step": 8366 }, { "epoch": 66.936, "grad_norm": 22.781431198120117, "learning_rate": 1.840888888888889e-05, "loss": 0.9338, "step": 8367 }, { "epoch": 66.944, "grad_norm": 21.9698429107666, "learning_rate": 1.8404444444444445e-05, "loss": 0.8699, "step": 8368 }, { "epoch": 66.952, "grad_norm": 32.950042724609375, "learning_rate": 1.84e-05, "loss": 1.3602, "step": 8369 }, { "epoch": 66.96, "grad_norm": 28.08241844177246, "learning_rate": 1.8395555555555558e-05, "loss": 1.0038, "step": 8370 }, { "epoch": 66.968, "grad_norm": 29.804912567138672, "learning_rate": 1.8391111111111113e-05, "loss": 0.8409, "step": 8371 }, { "epoch": 66.976, "grad_norm": 16.69090461730957, "learning_rate": 1.8386666666666668e-05, "loss": 0.8538, "step": 8372 }, { "epoch": 66.984, "grad_norm": 19.94312286376953, "learning_rate": 1.8382222222222223e-05, "loss": 0.8554, "step": 8373 }, { "epoch": 66.992, "grad_norm": 23.0439510345459, "learning_rate": 1.837777777777778e-05, "loss": 0.8985, "step": 8374 }, { "epoch": 67.0, "grad_norm": 18.68393898010254, "learning_rate": 1.8373333333333332e-05, "loss": 0.8732, "step": 8375 }, { "epoch": 67.0, "eval_loss": 0.9858114123344421, "eval_map": 0.4373, "eval_map_50": 0.8069, "eval_map_75": 0.3974, "eval_map_Coverall": 0.6275, "eval_map_Face_Shield": 0.5482, "eval_map_Gloves": 0.3624, "eval_map_Goggles": 0.2357, "eval_map_Mask": 0.4126, "eval_map_large": 0.6449, "eval_map_medium": 0.3114, "eval_map_small": 0.3122, "eval_mar_1": 0.3358, "eval_mar_10": 0.5653, "eval_mar_100": 0.5806, "eval_mar_100_Coverall": 0.7178, "eval_mar_100_Face_Shield": 0.7118, 
"eval_mar_100_Gloves": 0.4885, "eval_mar_100_Goggles": 0.4812, "eval_mar_100_Mask": 0.5038, "eval_mar_large": 0.7347, "eval_mar_medium": 0.4486, "eval_mar_small": 0.4262, "eval_runtime": 0.9252, "eval_samples_per_second": 31.346, "eval_steps_per_second": 2.162, "step": 8375 }, { "epoch": 67.008, "grad_norm": 24.6295166015625, "learning_rate": 1.836888888888889e-05, "loss": 1.0586, "step": 8376 }, { "epoch": 67.016, "grad_norm": 26.566579818725586, "learning_rate": 1.8364444444444446e-05, "loss": 0.7243, "step": 8377 }, { "epoch": 67.024, "grad_norm": 15.527758598327637, "learning_rate": 1.8360000000000004e-05, "loss": 0.9131, "step": 8378 }, { "epoch": 67.032, "grad_norm": 44.57094192504883, "learning_rate": 1.8355555555555555e-05, "loss": 1.1481, "step": 8379 }, { "epoch": 67.04, "grad_norm": 20.438982009887695, "learning_rate": 1.8351111111111113e-05, "loss": 0.9298, "step": 8380 }, { "epoch": 67.048, "grad_norm": 24.295475006103516, "learning_rate": 1.834666666666667e-05, "loss": 1.1399, "step": 8381 }, { "epoch": 67.056, "grad_norm": 16.406780242919922, "learning_rate": 1.8342222222222223e-05, "loss": 0.7858, "step": 8382 }, { "epoch": 67.064, "grad_norm": 43.87119674682617, "learning_rate": 1.8337777777777778e-05, "loss": 0.9229, "step": 8383 }, { "epoch": 67.072, "grad_norm": 24.070308685302734, "learning_rate": 1.8333333333333333e-05, "loss": 0.7334, "step": 8384 }, { "epoch": 67.08, "grad_norm": 13.304052352905273, "learning_rate": 1.832888888888889e-05, "loss": 1.2093, "step": 8385 }, { "epoch": 67.088, "grad_norm": 37.19035339355469, "learning_rate": 1.8324444444444443e-05, "loss": 0.8127, "step": 8386 }, { "epoch": 67.096, "grad_norm": 12.080044746398926, "learning_rate": 1.832e-05, "loss": 0.8903, "step": 8387 }, { "epoch": 67.104, "grad_norm": 28.385175704956055, "learning_rate": 1.8315555555555556e-05, "loss": 0.8409, "step": 8388 }, { "epoch": 67.112, "grad_norm": 10.71183967590332, "learning_rate": 1.8311111111111114e-05, "loss": 0.7804, "step": 
8389 }, { "epoch": 67.12, "grad_norm": 30.0295352935791, "learning_rate": 1.8306666666666665e-05, "loss": 0.9108, "step": 8390 }, { "epoch": 67.128, "grad_norm": 19.786190032958984, "learning_rate": 1.8302222222222224e-05, "loss": 1.1365, "step": 8391 }, { "epoch": 67.136, "grad_norm": 13.187246322631836, "learning_rate": 1.829777777777778e-05, "loss": 0.8441, "step": 8392 }, { "epoch": 67.144, "grad_norm": 29.537141799926758, "learning_rate": 1.8293333333333333e-05, "loss": 0.7936, "step": 8393 }, { "epoch": 67.152, "grad_norm": 26.03923797607422, "learning_rate": 1.8288888888888888e-05, "loss": 2.1339, "step": 8394 }, { "epoch": 67.16, "grad_norm": 19.58879280090332, "learning_rate": 1.8284444444444446e-05, "loss": 0.7761, "step": 8395 }, { "epoch": 67.168, "grad_norm": 22.512279510498047, "learning_rate": 1.828e-05, "loss": 0.678, "step": 8396 }, { "epoch": 67.176, "grad_norm": 44.37029266357422, "learning_rate": 1.8275555555555556e-05, "loss": 0.8659, "step": 8397 }, { "epoch": 67.184, "grad_norm": 29.610576629638672, "learning_rate": 1.827111111111111e-05, "loss": 0.9097, "step": 8398 }, { "epoch": 67.192, "grad_norm": 23.701677322387695, "learning_rate": 1.826666666666667e-05, "loss": 0.7311, "step": 8399 }, { "epoch": 67.2, "grad_norm": 80.79283905029297, "learning_rate": 1.826222222222222e-05, "loss": 0.6095, "step": 8400 }, { "epoch": 67.208, "grad_norm": 23.407588958740234, "learning_rate": 1.825777777777778e-05, "loss": 0.9412, "step": 8401 }, { "epoch": 67.216, "grad_norm": 32.19758605957031, "learning_rate": 1.8253333333333334e-05, "loss": 1.3867, "step": 8402 }, { "epoch": 67.224, "grad_norm": 29.784366607666016, "learning_rate": 1.8248888888888892e-05, "loss": 0.7991, "step": 8403 }, { "epoch": 67.232, "grad_norm": 66.72987365722656, "learning_rate": 1.8244444444444443e-05, "loss": 0.7764, "step": 8404 }, { "epoch": 67.24, "grad_norm": 28.05963134765625, "learning_rate": 1.824e-05, "loss": 1.1254, "step": 8405 }, { "epoch": 67.248, "grad_norm": 
25.19721221923828, "learning_rate": 1.8235555555555556e-05, "loss": 1.2577, "step": 8406 }, { "epoch": 67.256, "grad_norm": 28.841346740722656, "learning_rate": 1.823111111111111e-05, "loss": 1.7932, "step": 8407 }, { "epoch": 67.264, "grad_norm": 70.03002166748047, "learning_rate": 1.8226666666666666e-05, "loss": 1.026, "step": 8408 }, { "epoch": 67.272, "grad_norm": 15.219738960266113, "learning_rate": 1.8222222222222224e-05, "loss": 0.7355, "step": 8409 }, { "epoch": 67.28, "grad_norm": 27.428142547607422, "learning_rate": 1.821777777777778e-05, "loss": 1.0222, "step": 8410 }, { "epoch": 67.288, "grad_norm": 18.773635864257812, "learning_rate": 1.8213333333333334e-05, "loss": 0.7801, "step": 8411 }, { "epoch": 67.296, "grad_norm": 140.05604553222656, "learning_rate": 1.820888888888889e-05, "loss": 0.8113, "step": 8412 }, { "epoch": 67.304, "grad_norm": 24.302509307861328, "learning_rate": 1.8204444444444447e-05, "loss": 1.0417, "step": 8413 }, { "epoch": 67.312, "grad_norm": 24.38848114013672, "learning_rate": 1.8200000000000002e-05, "loss": 0.8184, "step": 8414 }, { "epoch": 67.32, "grad_norm": 60.05647659301758, "learning_rate": 1.8195555555555557e-05, "loss": 1.8793, "step": 8415 }, { "epoch": 67.328, "grad_norm": 24.019855499267578, "learning_rate": 1.819111111111111e-05, "loss": 0.8928, "step": 8416 }, { "epoch": 67.336, "grad_norm": 40.70835876464844, "learning_rate": 1.818666666666667e-05, "loss": 0.7737, "step": 8417 }, { "epoch": 67.344, "grad_norm": 22.918533325195312, "learning_rate": 1.818222222222222e-05, "loss": 0.756, "step": 8418 }, { "epoch": 67.352, "grad_norm": 29.076032638549805, "learning_rate": 1.817777777777778e-05, "loss": 0.7616, "step": 8419 }, { "epoch": 67.36, "grad_norm": 30.675230026245117, "learning_rate": 1.8173333333333334e-05, "loss": 1.2198, "step": 8420 }, { "epoch": 67.368, "grad_norm": 16.138580322265625, "learning_rate": 1.816888888888889e-05, "loss": 0.8631, "step": 8421 }, { "epoch": 67.376, "grad_norm": 
19.13457679748535, "learning_rate": 1.8164444444444444e-05, "loss": 0.8719, "step": 8422 }, { "epoch": 67.384, "grad_norm": 25.873010635375977, "learning_rate": 1.8160000000000002e-05, "loss": 0.7971, "step": 8423 }, { "epoch": 67.392, "grad_norm": 31.919233322143555, "learning_rate": 1.8155555555555557e-05, "loss": 1.2254, "step": 8424 }, { "epoch": 67.4, "grad_norm": 68.01958465576172, "learning_rate": 1.8151111111111112e-05, "loss": 0.7343, "step": 8425 }, { "epoch": 67.408, "grad_norm": 16.765336990356445, "learning_rate": 1.8146666666666667e-05, "loss": 1.0922, "step": 8426 }, { "epoch": 67.416, "grad_norm": 14.555087089538574, "learning_rate": 1.8142222222222225e-05, "loss": 0.8404, "step": 8427 }, { "epoch": 67.424, "grad_norm": 37.34844207763672, "learning_rate": 1.813777777777778e-05, "loss": 2.8704, "step": 8428 }, { "epoch": 67.432, "grad_norm": 21.8177547454834, "learning_rate": 1.8133333333333335e-05, "loss": 0.8967, "step": 8429 }, { "epoch": 67.44, "grad_norm": 12.160787582397461, "learning_rate": 1.812888888888889e-05, "loss": 0.715, "step": 8430 }, { "epoch": 67.448, "grad_norm": 16.4807186126709, "learning_rate": 1.8124444444444448e-05, "loss": 0.5998, "step": 8431 }, { "epoch": 67.456, "grad_norm": 22.09857177734375, "learning_rate": 1.812e-05, "loss": 0.7804, "step": 8432 }, { "epoch": 67.464, "grad_norm": 24.389360427856445, "learning_rate": 1.8115555555555554e-05, "loss": 0.8706, "step": 8433 }, { "epoch": 67.472, "grad_norm": 17.680212020874023, "learning_rate": 1.8111111111111112e-05, "loss": 1.1638, "step": 8434 }, { "epoch": 67.48, "grad_norm": 86.57984924316406, "learning_rate": 1.8106666666666667e-05, "loss": 1.3231, "step": 8435 }, { "epoch": 67.488, "grad_norm": 12.663910865783691, "learning_rate": 1.8102222222222222e-05, "loss": 1.1815, "step": 8436 }, { "epoch": 67.496, "grad_norm": 23.879552841186523, "learning_rate": 1.8097777777777777e-05, "loss": 0.9442, "step": 8437 }, { "epoch": 67.504, "grad_norm": 18.510730743408203, 
"learning_rate": 1.8093333333333335e-05, "loss": 1.1606, "step": 8438 }, { "epoch": 67.512, "grad_norm": 22.29662322998047, "learning_rate": 1.808888888888889e-05, "loss": 0.8782, "step": 8439 }, { "epoch": 67.52, "grad_norm": 22.078601837158203, "learning_rate": 1.8084444444444445e-05, "loss": 1.041, "step": 8440 }, { "epoch": 67.528, "grad_norm": 36.08121871948242, "learning_rate": 1.808e-05, "loss": 0.8922, "step": 8441 }, { "epoch": 67.536, "grad_norm": 43.82579040527344, "learning_rate": 1.8075555555555558e-05, "loss": 2.1201, "step": 8442 }, { "epoch": 67.544, "grad_norm": 16.444887161254883, "learning_rate": 1.807111111111111e-05, "loss": 0.915, "step": 8443 }, { "epoch": 67.552, "grad_norm": 16.74530601501465, "learning_rate": 1.8066666666666668e-05, "loss": 0.7371, "step": 8444 }, { "epoch": 67.56, "grad_norm": 76.57181549072266, "learning_rate": 1.8062222222222222e-05, "loss": 0.59, "step": 8445 }, { "epoch": 67.568, "grad_norm": 20.231426239013672, "learning_rate": 1.805777777777778e-05, "loss": 0.6548, "step": 8446 }, { "epoch": 67.576, "grad_norm": 17.24272346496582, "learning_rate": 1.8053333333333332e-05, "loss": 0.8221, "step": 8447 }, { "epoch": 67.584, "grad_norm": 44.30107879638672, "learning_rate": 1.804888888888889e-05, "loss": 0.6861, "step": 8448 }, { "epoch": 67.592, "grad_norm": 26.921043395996094, "learning_rate": 1.8044444444444445e-05, "loss": 0.6876, "step": 8449 }, { "epoch": 67.6, "grad_norm": 37.359867095947266, "learning_rate": 1.804e-05, "loss": 1.0987, "step": 8450 }, { "epoch": 67.608, "grad_norm": 33.98978042602539, "learning_rate": 1.8035555555555555e-05, "loss": 1.3674, "step": 8451 }, { "epoch": 67.616, "grad_norm": 18.762447357177734, "learning_rate": 1.8031111111111113e-05, "loss": 0.7531, "step": 8452 }, { "epoch": 67.624, "grad_norm": 39.343963623046875, "learning_rate": 1.8026666666666668e-05, "loss": 0.7529, "step": 8453 }, { "epoch": 67.632, "grad_norm": 23.164243698120117, "learning_rate": 1.8022222222222223e-05, 
"loss": 0.7979, "step": 8454 }, { "epoch": 67.64, "grad_norm": 25.268352508544922, "learning_rate": 1.8017777777777778e-05, "loss": 1.0252, "step": 8455 }, { "epoch": 67.648, "grad_norm": 19.378314971923828, "learning_rate": 1.8013333333333336e-05, "loss": 1.3322, "step": 8456 }, { "epoch": 67.656, "grad_norm": 50.92955017089844, "learning_rate": 1.8008888888888887e-05, "loss": 1.4466, "step": 8457 }, { "epoch": 67.664, "grad_norm": 33.750213623046875, "learning_rate": 1.8004444444444446e-05, "loss": 1.0342, "step": 8458 }, { "epoch": 67.672, "grad_norm": 19.565874099731445, "learning_rate": 1.8e-05, "loss": 0.5727, "step": 8459 }, { "epoch": 67.68, "grad_norm": 69.6037368774414, "learning_rate": 1.799555555555556e-05, "loss": 0.6139, "step": 8460 }, { "epoch": 67.688, "grad_norm": 69.04592895507812, "learning_rate": 1.799111111111111e-05, "loss": 0.9426, "step": 8461 }, { "epoch": 67.696, "grad_norm": 21.20772933959961, "learning_rate": 1.798666666666667e-05, "loss": 1.0289, "step": 8462 }, { "epoch": 67.704, "grad_norm": 26.1975154876709, "learning_rate": 1.7982222222222223e-05, "loss": 0.6936, "step": 8463 }, { "epoch": 67.712, "grad_norm": 26.5003719329834, "learning_rate": 1.7977777777777778e-05, "loss": 0.9794, "step": 8464 }, { "epoch": 67.72, "grad_norm": 134.08602905273438, "learning_rate": 1.7973333333333333e-05, "loss": 1.1819, "step": 8465 }, { "epoch": 67.728, "grad_norm": 27.240690231323242, "learning_rate": 1.796888888888889e-05, "loss": 1.2402, "step": 8466 }, { "epoch": 67.736, "grad_norm": 38.672183990478516, "learning_rate": 1.7964444444444446e-05, "loss": 0.7113, "step": 8467 }, { "epoch": 67.744, "grad_norm": 26.368032455444336, "learning_rate": 1.796e-05, "loss": 0.9917, "step": 8468 }, { "epoch": 67.752, "grad_norm": 29.56007957458496, "learning_rate": 1.7955555555555556e-05, "loss": 0.9674, "step": 8469 }, { "epoch": 67.76, "grad_norm": 17.14199447631836, "learning_rate": 1.7951111111111114e-05, "loss": 0.8413, "step": 8470 }, { "epoch": 
67.768, "grad_norm": 18.388769149780273, "learning_rate": 1.794666666666667e-05, "loss": 0.6916, "step": 8471 }, { "epoch": 67.776, "grad_norm": 25.636919021606445, "learning_rate": 1.7942222222222224e-05, "loss": 0.7909, "step": 8472 }, { "epoch": 67.784, "grad_norm": 75.01454162597656, "learning_rate": 1.793777777777778e-05, "loss": 1.1168, "step": 8473 }, { "epoch": 67.792, "grad_norm": 25.080692291259766, "learning_rate": 1.7933333333333337e-05, "loss": 0.8235, "step": 8474 }, { "epoch": 67.8, "grad_norm": 20.555282592773438, "learning_rate": 1.7928888888888888e-05, "loss": 0.9218, "step": 8475 }, { "epoch": 67.808, "grad_norm": 54.01340866088867, "learning_rate": 1.7924444444444446e-05, "loss": 0.9355, "step": 8476 }, { "epoch": 67.816, "grad_norm": 28.48931884765625, "learning_rate": 1.792e-05, "loss": 0.9878, "step": 8477 }, { "epoch": 67.824, "grad_norm": 27.453645706176758, "learning_rate": 1.7915555555555556e-05, "loss": 0.7801, "step": 8478 }, { "epoch": 67.832, "grad_norm": 26.76273536682129, "learning_rate": 1.791111111111111e-05, "loss": 0.8378, "step": 8479 }, { "epoch": 67.84, "grad_norm": 15.890838623046875, "learning_rate": 1.790666666666667e-05, "loss": 0.7866, "step": 8480 }, { "epoch": 67.848, "grad_norm": 27.728931427001953, "learning_rate": 1.7902222222222224e-05, "loss": 0.9607, "step": 8481 }, { "epoch": 67.856, "grad_norm": 43.17155075073242, "learning_rate": 1.789777777777778e-05, "loss": 1.0395, "step": 8482 }, { "epoch": 67.864, "grad_norm": 22.819618225097656, "learning_rate": 1.7893333333333334e-05, "loss": 0.7537, "step": 8483 }, { "epoch": 67.872, "grad_norm": 73.18134307861328, "learning_rate": 1.788888888888889e-05, "loss": 0.6762, "step": 8484 }, { "epoch": 67.88, "grad_norm": 29.71888542175293, "learning_rate": 1.7884444444444447e-05, "loss": 1.3763, "step": 8485 }, { "epoch": 67.888, "grad_norm": 18.06717300415039, "learning_rate": 1.7879999999999998e-05, "loss": 0.5914, "step": 8486 }, { "epoch": 67.896, "grad_norm": 
20.698904037475586, "learning_rate": 1.7875555555555556e-05, "loss": 0.6512, "step": 8487 }, { "epoch": 67.904, "grad_norm": 23.97759437561035, "learning_rate": 1.787111111111111e-05, "loss": 0.8846, "step": 8488 }, { "epoch": 67.912, "grad_norm": 17.234241485595703, "learning_rate": 1.7866666666666666e-05, "loss": 0.4919, "step": 8489 }, { "epoch": 67.92, "grad_norm": 30.54853630065918, "learning_rate": 1.786222222222222e-05, "loss": 0.9751, "step": 8490 }, { "epoch": 67.928, "grad_norm": 15.968672752380371, "learning_rate": 1.785777777777778e-05, "loss": 0.8533, "step": 8491 }, { "epoch": 67.936, "grad_norm": 25.086811065673828, "learning_rate": 1.7853333333333334e-05, "loss": 0.7912, "step": 8492 }, { "epoch": 67.944, "grad_norm": 27.656423568725586, "learning_rate": 1.784888888888889e-05, "loss": 1.1034, "step": 8493 }, { "epoch": 67.952, "grad_norm": 88.76274108886719, "learning_rate": 1.7844444444444444e-05, "loss": 0.9038, "step": 8494 }, { "epoch": 67.96, "grad_norm": 51.757911682128906, "learning_rate": 1.7840000000000002e-05, "loss": 0.7239, "step": 8495 }, { "epoch": 67.968, "grad_norm": 16.950416564941406, "learning_rate": 1.7835555555555557e-05, "loss": 0.8689, "step": 8496 }, { "epoch": 67.976, "grad_norm": 15.675891876220703, "learning_rate": 1.783111111111111e-05, "loss": 1.0368, "step": 8497 }, { "epoch": 67.984, "grad_norm": 20.50631332397461, "learning_rate": 1.7826666666666667e-05, "loss": 1.0245, "step": 8498 }, { "epoch": 67.992, "grad_norm": 35.409305572509766, "learning_rate": 1.7822222222222225e-05, "loss": 0.7691, "step": 8499 }, { "epoch": 68.0, "grad_norm": 16.640548706054688, "learning_rate": 1.7817777777777776e-05, "loss": 0.9299, "step": 8500 }, { "epoch": 68.0, "eval_loss": 1.0379208326339722, "eval_map": 0.4472, "eval_map_50": 0.811, "eval_map_75": 0.4368, "eval_map_Coverall": 0.6322, "eval_map_Face_Shield": 0.5563, "eval_map_Gloves": 0.3676, "eval_map_Goggles": 0.234, "eval_map_Mask": 0.4457, "eval_map_large": 0.6716, 
"eval_map_medium": 0.3075, "eval_map_small": 0.3669, "eval_mar_1": 0.3305, "eval_mar_10": 0.5798, "eval_mar_100": 0.5902, "eval_mar_100_Coverall": 0.7356, "eval_mar_100_Face_Shield": 0.7118, "eval_mar_100_Gloves": 0.4787, "eval_mar_100_Goggles": 0.4906, "eval_mar_100_Mask": 0.5346, "eval_mar_large": 0.7706, "eval_mar_medium": 0.4414, "eval_mar_small": 0.4473, "eval_runtime": 0.9382, "eval_samples_per_second": 30.909, "eval_steps_per_second": 2.132, "step": 8500 }, { "epoch": 68.008, "grad_norm": 21.109052658081055, "learning_rate": 1.7813333333333334e-05, "loss": 0.7189, "step": 8501 }, { "epoch": 68.016, "grad_norm": 55.07770538330078, "learning_rate": 1.780888888888889e-05, "loss": 0.6742, "step": 8502 }, { "epoch": 68.024, "grad_norm": 20.3697452545166, "learning_rate": 1.7804444444444444e-05, "loss": 1.0053, "step": 8503 }, { "epoch": 68.032, "grad_norm": 13.066651344299316, "learning_rate": 1.78e-05, "loss": 0.7471, "step": 8504 }, { "epoch": 68.04, "grad_norm": 45.72903823852539, "learning_rate": 1.7795555555555557e-05, "loss": 1.1397, "step": 8505 }, { "epoch": 68.048, "grad_norm": 19.640581130981445, "learning_rate": 1.7791111111111112e-05, "loss": 0.8504, "step": 8506 }, { "epoch": 68.056, "grad_norm": 46.13285827636719, "learning_rate": 1.7786666666666667e-05, "loss": 0.7176, "step": 8507 }, { "epoch": 68.064, "grad_norm": 22.03217124938965, "learning_rate": 1.7782222222222222e-05, "loss": 0.6437, "step": 8508 }, { "epoch": 68.072, "grad_norm": 22.88347625732422, "learning_rate": 1.777777777777778e-05, "loss": 0.9651, "step": 8509 }, { "epoch": 68.08, "grad_norm": 28.492725372314453, "learning_rate": 1.7773333333333335e-05, "loss": 0.9688, "step": 8510 }, { "epoch": 68.088, "grad_norm": 35.93935012817383, "learning_rate": 1.776888888888889e-05, "loss": 1.9103, "step": 8511 }, { "epoch": 68.096, "grad_norm": 33.523338317871094, "learning_rate": 1.7764444444444445e-05, "loss": 0.8909, "step": 8512 }, { "epoch": 68.104, "grad_norm": 19.236282348632812, 
"learning_rate": 1.7760000000000003e-05, "loss": 0.9042, "step": 8513 }, { "epoch": 68.112, "grad_norm": 13.21009635925293, "learning_rate": 1.7755555555555554e-05, "loss": 0.6178, "step": 8514 }, { "epoch": 68.12, "grad_norm": 29.260061264038086, "learning_rate": 1.7751111111111112e-05, "loss": 1.0586, "step": 8515 }, { "epoch": 68.128, "grad_norm": 23.979263305664062, "learning_rate": 1.7746666666666667e-05, "loss": 0.6404, "step": 8516 }, { "epoch": 68.136, "grad_norm": 26.158002853393555, "learning_rate": 1.7742222222222226e-05, "loss": 0.659, "step": 8517 }, { "epoch": 68.144, "grad_norm": 44.19608688354492, "learning_rate": 1.7737777777777777e-05, "loss": 1.074, "step": 8518 }, { "epoch": 68.152, "grad_norm": 15.026436805725098, "learning_rate": 1.7733333333333335e-05, "loss": 0.7938, "step": 8519 }, { "epoch": 68.16, "grad_norm": 67.26373291015625, "learning_rate": 1.772888888888889e-05, "loss": 0.8991, "step": 8520 }, { "epoch": 68.168, "grad_norm": 20.155704498291016, "learning_rate": 1.7724444444444445e-05, "loss": 0.8963, "step": 8521 }, { "epoch": 68.176, "grad_norm": 21.36920738220215, "learning_rate": 1.772e-05, "loss": 0.67, "step": 8522 }, { "epoch": 68.184, "grad_norm": 18.50811004638672, "learning_rate": 1.7715555555555558e-05, "loss": 0.8235, "step": 8523 }, { "epoch": 68.192, "grad_norm": 17.459081649780273, "learning_rate": 1.7711111111111113e-05, "loss": 0.5228, "step": 8524 }, { "epoch": 68.2, "grad_norm": 59.3146858215332, "learning_rate": 1.7706666666666668e-05, "loss": 0.6811, "step": 8525 }, { "epoch": 68.208, "grad_norm": 24.001924514770508, "learning_rate": 1.7702222222222223e-05, "loss": 1.3588, "step": 8526 }, { "epoch": 68.216, "grad_norm": 51.58468246459961, "learning_rate": 1.769777777777778e-05, "loss": 0.9534, "step": 8527 }, { "epoch": 68.224, "grad_norm": 53.36509323120117, "learning_rate": 1.7693333333333336e-05, "loss": 0.7006, "step": 8528 }, { "epoch": 68.232, "grad_norm": 123.390869140625, "learning_rate": 
1.768888888888889e-05, "loss": 0.9129, "step": 8529 }, { "epoch": 68.24, "grad_norm": 26.746549606323242, "learning_rate": 1.7684444444444445e-05, "loss": 1.4595, "step": 8530 }, { "epoch": 68.248, "grad_norm": 16.542545318603516, "learning_rate": 1.7680000000000004e-05, "loss": 0.7614, "step": 8531 }, { "epoch": 68.256, "grad_norm": 22.468164443969727, "learning_rate": 1.7675555555555555e-05, "loss": 0.8395, "step": 8532 }, { "epoch": 68.264, "grad_norm": 33.748260498046875, "learning_rate": 1.7671111111111113e-05, "loss": 1.0958, "step": 8533 }, { "epoch": 68.272, "grad_norm": 32.3319206237793, "learning_rate": 1.7666666666666668e-05, "loss": 0.5797, "step": 8534 }, { "epoch": 68.28, "grad_norm": 34.77672576904297, "learning_rate": 1.7662222222222223e-05, "loss": 0.7111, "step": 8535 }, { "epoch": 68.288, "grad_norm": 17.236133575439453, "learning_rate": 1.7657777777777778e-05, "loss": 2.2016, "step": 8536 }, { "epoch": 68.296, "grad_norm": 28.299678802490234, "learning_rate": 1.7653333333333333e-05, "loss": 0.8604, "step": 8537 }, { "epoch": 68.304, "grad_norm": 23.156763076782227, "learning_rate": 1.764888888888889e-05, "loss": 1.4326, "step": 8538 }, { "epoch": 68.312, "grad_norm": 46.82796096801758, "learning_rate": 1.7644444444444446e-05, "loss": 0.9758, "step": 8539 }, { "epoch": 68.32, "grad_norm": 27.33499526977539, "learning_rate": 1.764e-05, "loss": 0.7683, "step": 8540 }, { "epoch": 68.328, "grad_norm": 44.39160919189453, "learning_rate": 1.7635555555555555e-05, "loss": 0.6703, "step": 8541 }, { "epoch": 68.336, "grad_norm": 29.751514434814453, "learning_rate": 1.7631111111111114e-05, "loss": 1.2923, "step": 8542 }, { "epoch": 68.344, "grad_norm": 25.0213623046875, "learning_rate": 1.7626666666666665e-05, "loss": 1.0069, "step": 8543 }, { "epoch": 68.352, "grad_norm": 17.229877471923828, "learning_rate": 1.7622222222222223e-05, "loss": 1.0654, "step": 8544 }, { "epoch": 68.36, "grad_norm": 24.27475357055664, "learning_rate": 1.7617777777777778e-05, 
"loss": 1.1767, "step": 8545 }, { "epoch": 68.368, "grad_norm": 17.66547966003418, "learning_rate": 1.7613333333333333e-05, "loss": 1.5922, "step": 8546 }, { "epoch": 68.376, "grad_norm": 40.704139709472656, "learning_rate": 1.7608888888888888e-05, "loss": 0.7719, "step": 8547 }, { "epoch": 68.384, "grad_norm": 40.06197738647461, "learning_rate": 1.7604444444444446e-05, "loss": 0.9207, "step": 8548 }, { "epoch": 68.392, "grad_norm": 36.41609573364258, "learning_rate": 1.76e-05, "loss": 1.0154, "step": 8549 }, { "epoch": 68.4, "grad_norm": 16.744060516357422, "learning_rate": 1.7595555555555556e-05, "loss": 1.4222, "step": 8550 }, { "epoch": 68.408, "grad_norm": 31.804601669311523, "learning_rate": 1.759111111111111e-05, "loss": 0.8382, "step": 8551 }, { "epoch": 68.416, "grad_norm": 18.249208450317383, "learning_rate": 1.758666666666667e-05, "loss": 1.1256, "step": 8552 }, { "epoch": 68.424, "grad_norm": 34.15445327758789, "learning_rate": 1.7582222222222224e-05, "loss": 2.6754, "step": 8553 }, { "epoch": 68.432, "grad_norm": 21.332502365112305, "learning_rate": 1.757777777777778e-05, "loss": 0.994, "step": 8554 }, { "epoch": 68.44, "grad_norm": 17.860401153564453, "learning_rate": 1.7573333333333333e-05, "loss": 0.9313, "step": 8555 }, { "epoch": 68.448, "grad_norm": 17.79829978942871, "learning_rate": 1.756888888888889e-05, "loss": 1.1214, "step": 8556 }, { "epoch": 68.456, "grad_norm": 28.65058135986328, "learning_rate": 1.7564444444444443e-05, "loss": 1.0708, "step": 8557 }, { "epoch": 68.464, "grad_norm": 15.011242866516113, "learning_rate": 1.756e-05, "loss": 0.8084, "step": 8558 }, { "epoch": 68.472, "grad_norm": 12.195992469787598, "learning_rate": 1.7555555555555556e-05, "loss": 0.7169, "step": 8559 }, { "epoch": 68.48, "grad_norm": 52.08492660522461, "learning_rate": 1.755111111111111e-05, "loss": 0.5609, "step": 8560 }, { "epoch": 68.488, "grad_norm": 23.581815719604492, "learning_rate": 1.7546666666666666e-05, "loss": 1.1712, "step": 8561 }, { "epoch": 
68.496, "grad_norm": 26.08110809326172, "learning_rate": 1.7542222222222224e-05, "loss": 1.0799, "step": 8562 }, { "epoch": 68.504, "grad_norm": 33.56792068481445, "learning_rate": 1.753777777777778e-05, "loss": 0.85, "step": 8563 }, { "epoch": 68.512, "grad_norm": 21.30628204345703, "learning_rate": 1.7533333333333334e-05, "loss": 0.8649, "step": 8564 }, { "epoch": 68.52, "grad_norm": 18.15224266052246, "learning_rate": 1.752888888888889e-05, "loss": 1.2957, "step": 8565 }, { "epoch": 68.528, "grad_norm": 33.9412841796875, "learning_rate": 1.7524444444444447e-05, "loss": 1.2038, "step": 8566 }, { "epoch": 68.536, "grad_norm": 15.724703788757324, "learning_rate": 1.752e-05, "loss": 0.9935, "step": 8567 }, { "epoch": 68.544, "grad_norm": 27.358318328857422, "learning_rate": 1.7515555555555557e-05, "loss": 0.7084, "step": 8568 }, { "epoch": 68.552, "grad_norm": 30.159839630126953, "learning_rate": 1.751111111111111e-05, "loss": 0.828, "step": 8569 }, { "epoch": 68.56, "grad_norm": 28.24570083618164, "learning_rate": 1.750666666666667e-05, "loss": 0.6645, "step": 8570 }, { "epoch": 68.568, "grad_norm": 20.09062385559082, "learning_rate": 1.750222222222222e-05, "loss": 0.8272, "step": 8571 }, { "epoch": 68.576, "grad_norm": 18.239118576049805, "learning_rate": 1.749777777777778e-05, "loss": 0.8644, "step": 8572 }, { "epoch": 68.584, "grad_norm": 25.329212188720703, "learning_rate": 1.7493333333333334e-05, "loss": 0.99, "step": 8573 }, { "epoch": 68.592, "grad_norm": 14.88206672668457, "learning_rate": 1.7488888888888892e-05, "loss": 0.8999, "step": 8574 }, { "epoch": 68.6, "grad_norm": 17.69241714477539, "learning_rate": 1.7484444444444444e-05, "loss": 0.7552, "step": 8575 }, { "epoch": 68.608, "grad_norm": 21.69331932067871, "learning_rate": 1.7480000000000002e-05, "loss": 1.0251, "step": 8576 }, { "epoch": 68.616, "grad_norm": 36.157955169677734, "learning_rate": 1.7475555555555557e-05, "loss": 1.0635, "step": 8577 }, { "epoch": 68.624, "grad_norm": 
21.080286026000977, "learning_rate": 1.7471111111111112e-05, "loss": 0.8609, "step": 8578 }, { "epoch": 68.632, "grad_norm": 37.73291778564453, "learning_rate": 1.7466666666666667e-05, "loss": 1.1745, "step": 8579 }, { "epoch": 68.64, "grad_norm": 23.387598037719727, "learning_rate": 1.7462222222222225e-05, "loss": 0.8152, "step": 8580 }, { "epoch": 68.648, "grad_norm": 21.755260467529297, "learning_rate": 1.745777777777778e-05, "loss": 0.8265, "step": 8581 }, { "epoch": 68.656, "grad_norm": 42.371219635009766, "learning_rate": 1.7453333333333335e-05, "loss": 0.6555, "step": 8582 }, { "epoch": 68.664, "grad_norm": 26.389638900756836, "learning_rate": 1.744888888888889e-05, "loss": 0.9593, "step": 8583 }, { "epoch": 68.672, "grad_norm": 26.626859664916992, "learning_rate": 1.7444444444444448e-05, "loss": 0.9851, "step": 8584 }, { "epoch": 68.68, "grad_norm": 22.01044464111328, "learning_rate": 1.7440000000000002e-05, "loss": 0.9821, "step": 8585 }, { "epoch": 68.688, "grad_norm": 27.981857299804688, "learning_rate": 1.7435555555555557e-05, "loss": 0.7924, "step": 8586 }, { "epoch": 68.696, "grad_norm": 19.521881103515625, "learning_rate": 1.7431111111111112e-05, "loss": 0.7503, "step": 8587 }, { "epoch": 68.704, "grad_norm": 16.143417358398438, "learning_rate": 1.7426666666666667e-05, "loss": 0.6292, "step": 8588 }, { "epoch": 68.712, "grad_norm": 33.13394546508789, "learning_rate": 1.7422222222222222e-05, "loss": 0.5634, "step": 8589 }, { "epoch": 68.72, "grad_norm": 16.198514938354492, "learning_rate": 1.7417777777777777e-05, "loss": 0.8668, "step": 8590 }, { "epoch": 68.728, "grad_norm": 100.25357818603516, "learning_rate": 1.7413333333333335e-05, "loss": 1.0554, "step": 8591 }, { "epoch": 68.736, "grad_norm": 22.30512046813965, "learning_rate": 1.740888888888889e-05, "loss": 1.064, "step": 8592 }, { "epoch": 68.744, "grad_norm": 24.244781494140625, "learning_rate": 1.7404444444444445e-05, "loss": 1.0206, "step": 8593 }, { "epoch": 68.752, "grad_norm": 
30.345945358276367, "learning_rate": 1.74e-05, "loss": 1.0014, "step": 8594 }, { "epoch": 68.76, "grad_norm": 35.41468048095703, "learning_rate": 1.7395555555555558e-05, "loss": 1.0311, "step": 8595 }, { "epoch": 68.768, "grad_norm": 26.12664794921875, "learning_rate": 1.739111111111111e-05, "loss": 1.0508, "step": 8596 }, { "epoch": 68.776, "grad_norm": 30.92913818359375, "learning_rate": 1.7386666666666667e-05, "loss": 0.7363, "step": 8597 }, { "epoch": 68.784, "grad_norm": 61.49296951293945, "learning_rate": 1.7382222222222222e-05, "loss": 0.8703, "step": 8598 }, { "epoch": 68.792, "grad_norm": 24.43450927734375, "learning_rate": 1.737777777777778e-05, "loss": 0.8967, "step": 8599 }, { "epoch": 68.8, "grad_norm": 36.09532928466797, "learning_rate": 1.7373333333333332e-05, "loss": 0.9634, "step": 8600 }, { "epoch": 68.808, "grad_norm": 28.421480178833008, "learning_rate": 1.736888888888889e-05, "loss": 0.704, "step": 8601 }, { "epoch": 68.816, "grad_norm": 21.066064834594727, "learning_rate": 1.7364444444444445e-05, "loss": 0.9525, "step": 8602 }, { "epoch": 68.824, "grad_norm": 19.747272491455078, "learning_rate": 1.736e-05, "loss": 0.8682, "step": 8603 }, { "epoch": 68.832, "grad_norm": 37.50741195678711, "learning_rate": 1.7355555555555555e-05, "loss": 1.7389, "step": 8604 }, { "epoch": 68.84, "grad_norm": 161.91847229003906, "learning_rate": 1.7351111111111113e-05, "loss": 1.7578, "step": 8605 }, { "epoch": 68.848, "grad_norm": 23.715805053710938, "learning_rate": 1.7346666666666668e-05, "loss": 0.8081, "step": 8606 }, { "epoch": 68.856, "grad_norm": 28.386600494384766, "learning_rate": 1.7342222222222223e-05, "loss": 1.5229, "step": 8607 }, { "epoch": 68.864, "grad_norm": 17.06562614440918, "learning_rate": 1.7337777777777777e-05, "loss": 1.1784, "step": 8608 }, { "epoch": 68.872, "grad_norm": 421.9546203613281, "learning_rate": 1.7333333333333336e-05, "loss": 1.0917, "step": 8609 }, { "epoch": 68.88, "grad_norm": 24.80728530883789, "learning_rate": 
1.732888888888889e-05, "loss": 0.9301, "step": 8610 }, { "epoch": 68.888, "grad_norm": 20.366851806640625, "learning_rate": 1.7324444444444445e-05, "loss": 0.8838, "step": 8611 }, { "epoch": 68.896, "grad_norm": 23.56787109375, "learning_rate": 1.732e-05, "loss": 0.9996, "step": 8612 }, { "epoch": 68.904, "grad_norm": 43.93215560913086, "learning_rate": 1.731555555555556e-05, "loss": 0.8318, "step": 8613 }, { "epoch": 68.912, "grad_norm": 18.09685707092285, "learning_rate": 1.731111111111111e-05, "loss": 0.805, "step": 8614 }, { "epoch": 68.92, "grad_norm": 29.495065689086914, "learning_rate": 1.7306666666666668e-05, "loss": 0.5713, "step": 8615 }, { "epoch": 68.928, "grad_norm": 14.701478958129883, "learning_rate": 1.7302222222222223e-05, "loss": 0.6995, "step": 8616 }, { "epoch": 68.936, "grad_norm": 33.725563049316406, "learning_rate": 1.7297777777777778e-05, "loss": 0.8311, "step": 8617 }, { "epoch": 68.944, "grad_norm": 19.541025161743164, "learning_rate": 1.7293333333333333e-05, "loss": 0.7308, "step": 8618 }, { "epoch": 68.952, "grad_norm": 47.573360443115234, "learning_rate": 1.728888888888889e-05, "loss": 0.788, "step": 8619 }, { "epoch": 68.96, "grad_norm": 15.937369346618652, "learning_rate": 1.7284444444444446e-05, "loss": 1.0792, "step": 8620 }, { "epoch": 68.968, "grad_norm": 29.69386100769043, "learning_rate": 1.728e-05, "loss": 1.062, "step": 8621 }, { "epoch": 68.976, "grad_norm": 24.372600555419922, "learning_rate": 1.7275555555555555e-05, "loss": 0.791, "step": 8622 }, { "epoch": 68.984, "grad_norm": 12.01014232635498, "learning_rate": 1.7271111111111114e-05, "loss": 0.7974, "step": 8623 }, { "epoch": 68.992, "grad_norm": 21.387784957885742, "learning_rate": 1.726666666666667e-05, "loss": 1.0304, "step": 8624 }, { "epoch": 69.0, "grad_norm": 27.584074020385742, "learning_rate": 1.7262222222222223e-05, "loss": 1.0392, "step": 8625 }, { "epoch": 69.0, "eval_loss": 0.989592432975769, "eval_map": 0.4309, "eval_map_50": 0.7608, "eval_map_75": 0.3934, 
"eval_map_Coverall": 0.6337, "eval_map_Face_Shield": 0.5465, "eval_map_Gloves": 0.3701, "eval_map_Goggles": 0.185, "eval_map_Mask": 0.4194, "eval_map_large": 0.6468, "eval_map_medium": 0.3258, "eval_map_small": 0.4101, "eval_mar_1": 0.3379, "eval_mar_10": 0.5819, "eval_mar_100": 0.5991, "eval_mar_100_Coverall": 0.7356, "eval_mar_100_Face_Shield": 0.7471, "eval_mar_100_Gloves": 0.4885, "eval_mar_100_Goggles": 0.5031, "eval_mar_100_Mask": 0.5212, "eval_mar_large": 0.7537, "eval_mar_medium": 0.4787, "eval_mar_small": 0.5079, "eval_runtime": 0.9278, "eval_samples_per_second": 31.255, "eval_steps_per_second": 2.156, "step": 8625 }, { "epoch": 69.008, "grad_norm": 17.499990463256836, "learning_rate": 1.7257777777777778e-05, "loss": 0.7021, "step": 8626 }, { "epoch": 69.016, "grad_norm": 14.731825828552246, "learning_rate": 1.7253333333333336e-05, "loss": 1.1756, "step": 8627 }, { "epoch": 69.024, "grad_norm": 28.385011672973633, "learning_rate": 1.7248888888888888e-05, "loss": 1.0572, "step": 8628 }, { "epoch": 69.032, "grad_norm": 16.62505340576172, "learning_rate": 1.7244444444444446e-05, "loss": 1.0323, "step": 8629 }, { "epoch": 69.04, "grad_norm": 31.88622283935547, "learning_rate": 1.724e-05, "loss": 0.7109, "step": 8630 }, { "epoch": 69.048, "grad_norm": 45.65183639526367, "learning_rate": 1.723555555555556e-05, "loss": 0.9192, "step": 8631 }, { "epoch": 69.056, "grad_norm": 19.74309539794922, "learning_rate": 1.723111111111111e-05, "loss": 0.7053, "step": 8632 }, { "epoch": 69.064, "grad_norm": 18.474933624267578, "learning_rate": 1.722666666666667e-05, "loss": 0.9937, "step": 8633 }, { "epoch": 69.072, "grad_norm": 24.640676498413086, "learning_rate": 1.7222222222222224e-05, "loss": 0.8537, "step": 8634 }, { "epoch": 69.08, "grad_norm": 17.26175880432129, "learning_rate": 1.721777777777778e-05, "loss": 0.6803, "step": 8635 }, { "epoch": 69.088, "grad_norm": 24.29170036315918, "learning_rate": 1.7213333333333333e-05, "loss": 1.0268, "step": 8636 }, { "epoch": 
69.096, "grad_norm": 29.02078628540039, "learning_rate": 1.720888888888889e-05, "loss": 0.5836, "step": 8637 }, { "epoch": 69.104, "grad_norm": 18.687484741210938, "learning_rate": 1.7204444444444446e-05, "loss": 0.7411, "step": 8638 }, { "epoch": 69.112, "grad_norm": 16.515625, "learning_rate": 1.7199999999999998e-05, "loss": 0.8684, "step": 8639 }, { "epoch": 69.12, "grad_norm": 22.477733612060547, "learning_rate": 1.7195555555555556e-05, "loss": 0.9554, "step": 8640 }, { "epoch": 69.128, "grad_norm": 38.00128936767578, "learning_rate": 1.719111111111111e-05, "loss": 0.9743, "step": 8641 }, { "epoch": 69.136, "grad_norm": 25.738515853881836, "learning_rate": 1.718666666666667e-05, "loss": 1.1066, "step": 8642 }, { "epoch": 69.144, "grad_norm": 48.133644104003906, "learning_rate": 1.718222222222222e-05, "loss": 0.793, "step": 8643 }, { "epoch": 69.152, "grad_norm": 14.48824691772461, "learning_rate": 1.717777777777778e-05, "loss": 0.9599, "step": 8644 }, { "epoch": 69.16, "grad_norm": 32.36060333251953, "learning_rate": 1.7173333333333334e-05, "loss": 0.9123, "step": 8645 }, { "epoch": 69.168, "grad_norm": 15.9877347946167, "learning_rate": 1.716888888888889e-05, "loss": 0.9735, "step": 8646 }, { "epoch": 69.176, "grad_norm": 23.551006317138672, "learning_rate": 1.7164444444444443e-05, "loss": 1.0822, "step": 8647 }, { "epoch": 69.184, "grad_norm": 18.85470962524414, "learning_rate": 1.7160000000000002e-05, "loss": 1.0882, "step": 8648 }, { "epoch": 69.192, "grad_norm": 13.524386405944824, "learning_rate": 1.7155555555555557e-05, "loss": 1.6123, "step": 8649 }, { "epoch": 69.2, "grad_norm": 30.652280807495117, "learning_rate": 1.715111111111111e-05, "loss": 0.7948, "step": 8650 }, { "epoch": 69.208, "grad_norm": 42.452030181884766, "learning_rate": 1.7146666666666666e-05, "loss": 0.9415, "step": 8651 }, { "epoch": 69.216, "grad_norm": 61.27506637573242, "learning_rate": 1.7142222222222224e-05, "loss": 1.4763, "step": 8652 }, { "epoch": 69.224, "grad_norm": 
19.691003799438477, "learning_rate": 1.7137777777777776e-05, "loss": 1.0039, "step": 8653 }, { "epoch": 69.232, "grad_norm": 13.295628547668457, "learning_rate": 1.7133333333333334e-05, "loss": 0.7446, "step": 8654 }, { "epoch": 69.24, "grad_norm": 46.45072937011719, "learning_rate": 1.712888888888889e-05, "loss": 0.9778, "step": 8655 }, { "epoch": 69.248, "grad_norm": 13.598773002624512, "learning_rate": 1.7124444444444447e-05, "loss": 1.0307, "step": 8656 }, { "epoch": 69.256, "grad_norm": 52.361568450927734, "learning_rate": 1.712e-05, "loss": 0.9581, "step": 8657 }, { "epoch": 69.264, "grad_norm": 24.90450096130371, "learning_rate": 1.7115555555555557e-05, "loss": 0.6901, "step": 8658 }, { "epoch": 69.272, "grad_norm": 80.0473861694336, "learning_rate": 1.7111111111111112e-05, "loss": 1.2069, "step": 8659 }, { "epoch": 69.28, "grad_norm": 17.116369247436523, "learning_rate": 1.7106666666666667e-05, "loss": 0.7705, "step": 8660 }, { "epoch": 69.288, "grad_norm": 26.875782012939453, "learning_rate": 1.710222222222222e-05, "loss": 1.0195, "step": 8661 }, { "epoch": 69.296, "grad_norm": 70.26876068115234, "learning_rate": 1.709777777777778e-05, "loss": 0.8175, "step": 8662 }, { "epoch": 69.304, "grad_norm": 19.31991195678711, "learning_rate": 1.7093333333333335e-05, "loss": 0.9782, "step": 8663 }, { "epoch": 69.312, "grad_norm": 18.435956954956055, "learning_rate": 1.708888888888889e-05, "loss": 0.8931, "step": 8664 }, { "epoch": 69.32, "grad_norm": 18.323596954345703, "learning_rate": 1.7084444444444444e-05, "loss": 0.7155, "step": 8665 }, { "epoch": 69.328, "grad_norm": 17.698762893676758, "learning_rate": 1.7080000000000002e-05, "loss": 1.0064, "step": 8666 }, { "epoch": 69.336, "grad_norm": 27.79183006286621, "learning_rate": 1.7075555555555557e-05, "loss": 0.7832, "step": 8667 }, { "epoch": 69.344, "grad_norm": 17.590417861938477, "learning_rate": 1.7071111111111112e-05, "loss": 0.7608, "step": 8668 }, { "epoch": 69.352, "grad_norm": 18.14989471435547, 
"learning_rate": 1.7066666666666667e-05, "loss": 0.6712, "step": 8669 }, { "epoch": 69.36, "grad_norm": 25.59176254272461, "learning_rate": 1.7062222222222225e-05, "loss": 1.8232, "step": 8670 }, { "epoch": 69.368, "grad_norm": 154.2509307861328, "learning_rate": 1.7057777777777777e-05, "loss": 1.1013, "step": 8671 }, { "epoch": 69.376, "grad_norm": 34.82417678833008, "learning_rate": 1.7053333333333335e-05, "loss": 0.8651, "step": 8672 }, { "epoch": 69.384, "grad_norm": 18.604902267456055, "learning_rate": 1.704888888888889e-05, "loss": 0.6534, "step": 8673 }, { "epoch": 69.392, "grad_norm": 18.754898071289062, "learning_rate": 1.7044444444444445e-05, "loss": 0.8424, "step": 8674 }, { "epoch": 69.4, "grad_norm": 18.473379135131836, "learning_rate": 1.704e-05, "loss": 0.9766, "step": 8675 }, { "epoch": 69.408, "grad_norm": 27.837316513061523, "learning_rate": 1.7035555555555558e-05, "loss": 1.0792, "step": 8676 }, { "epoch": 69.416, "grad_norm": 24.043245315551758, "learning_rate": 1.7031111111111113e-05, "loss": 0.6665, "step": 8677 }, { "epoch": 69.424, "grad_norm": 27.908653259277344, "learning_rate": 1.7026666666666667e-05, "loss": 0.936, "step": 8678 }, { "epoch": 69.432, "grad_norm": 36.85116958618164, "learning_rate": 1.7022222222222222e-05, "loss": 1.7064, "step": 8679 }, { "epoch": 69.44, "grad_norm": 43.29554748535156, "learning_rate": 1.701777777777778e-05, "loss": 0.978, "step": 8680 }, { "epoch": 69.448, "grad_norm": 19.73858070373535, "learning_rate": 1.7013333333333335e-05, "loss": 0.8645, "step": 8681 }, { "epoch": 69.456, "grad_norm": 30.732934951782227, "learning_rate": 1.700888888888889e-05, "loss": 1.3939, "step": 8682 }, { "epoch": 69.464, "grad_norm": 18.148258209228516, "learning_rate": 1.7004444444444445e-05, "loss": 0.7271, "step": 8683 }, { "epoch": 69.472, "grad_norm": 24.86313247680664, "learning_rate": 1.7000000000000003e-05, "loss": 0.7446, "step": 8684 }, { "epoch": 69.48, "grad_norm": 32.97421646118164, "learning_rate": 
1.6995555555555555e-05, "loss": 0.6902, "step": 8685 }, { "epoch": 69.488, "grad_norm": 29.092336654663086, "learning_rate": 1.6991111111111113e-05, "loss": 0.6862, "step": 8686 }, { "epoch": 69.496, "grad_norm": 28.964506149291992, "learning_rate": 1.6986666666666668e-05, "loss": 0.7335, "step": 8687 }, { "epoch": 69.504, "grad_norm": 26.464323043823242, "learning_rate": 1.6982222222222226e-05, "loss": 0.7187, "step": 8688 }, { "epoch": 69.512, "grad_norm": 14.756009101867676, "learning_rate": 1.6977777777777777e-05, "loss": 0.7432, "step": 8689 }, { "epoch": 69.52, "grad_norm": 24.409839630126953, "learning_rate": 1.6973333333333336e-05, "loss": 1.2105, "step": 8690 }, { "epoch": 69.528, "grad_norm": 27.052148818969727, "learning_rate": 1.696888888888889e-05, "loss": 0.6351, "step": 8691 }, { "epoch": 69.536, "grad_norm": 26.413965225219727, "learning_rate": 1.6964444444444445e-05, "loss": 1.1505, "step": 8692 }, { "epoch": 69.544, "grad_norm": 26.016616821289062, "learning_rate": 1.696e-05, "loss": 1.1409, "step": 8693 }, { "epoch": 69.552, "grad_norm": 19.15420150756836, "learning_rate": 1.6955555555555555e-05, "loss": 1.0099, "step": 8694 }, { "epoch": 69.56, "grad_norm": 21.001544952392578, "learning_rate": 1.6951111111111113e-05, "loss": 0.7804, "step": 8695 }, { "epoch": 69.568, "grad_norm": 14.74337100982666, "learning_rate": 1.6946666666666665e-05, "loss": 0.7736, "step": 8696 }, { "epoch": 69.576, "grad_norm": 19.15567398071289, "learning_rate": 1.6942222222222223e-05, "loss": 1.0765, "step": 8697 }, { "epoch": 69.584, "grad_norm": 26.95336151123047, "learning_rate": 1.6937777777777778e-05, "loss": 1.043, "step": 8698 }, { "epoch": 69.592, "grad_norm": 19.227615356445312, "learning_rate": 1.6933333333333333e-05, "loss": 1.0098, "step": 8699 }, { "epoch": 69.6, "grad_norm": 35.491146087646484, "learning_rate": 1.6928888888888888e-05, "loss": 0.877, "step": 8700 }, { "epoch": 69.608, "grad_norm": 23.852148056030273, "learning_rate": 1.6924444444444446e-05, 
"loss": 1.814, "step": 8701 }, { "epoch": 69.616, "grad_norm": 16.980850219726562, "learning_rate": 1.692e-05, "loss": 0.5641, "step": 8702 }, { "epoch": 69.624, "grad_norm": 23.992597579956055, "learning_rate": 1.6915555555555555e-05, "loss": 1.8146, "step": 8703 }, { "epoch": 69.632, "grad_norm": 17.599483489990234, "learning_rate": 1.691111111111111e-05, "loss": 0.6851, "step": 8704 }, { "epoch": 69.64, "grad_norm": 12.612911224365234, "learning_rate": 1.690666666666667e-05, "loss": 1.005, "step": 8705 }, { "epoch": 69.648, "grad_norm": 25.121479034423828, "learning_rate": 1.6902222222222223e-05, "loss": 0.7453, "step": 8706 }, { "epoch": 69.656, "grad_norm": 22.454544067382812, "learning_rate": 1.6897777777777778e-05, "loss": 0.6698, "step": 8707 }, { "epoch": 69.664, "grad_norm": 19.054174423217773, "learning_rate": 1.6893333333333333e-05, "loss": 0.5972, "step": 8708 }, { "epoch": 69.672, "grad_norm": 50.5824089050293, "learning_rate": 1.688888888888889e-05, "loss": 0.675, "step": 8709 }, { "epoch": 69.68, "grad_norm": 18.813426971435547, "learning_rate": 1.6884444444444443e-05, "loss": 1.5663, "step": 8710 }, { "epoch": 69.688, "grad_norm": 42.85094451904297, "learning_rate": 1.688e-05, "loss": 0.8559, "step": 8711 }, { "epoch": 69.696, "grad_norm": 25.24781036376953, "learning_rate": 1.6875555555555556e-05, "loss": 0.7995, "step": 8712 }, { "epoch": 69.704, "grad_norm": 23.31064796447754, "learning_rate": 1.6871111111111114e-05, "loss": 0.8929, "step": 8713 }, { "epoch": 69.712, "grad_norm": 23.95000648498535, "learning_rate": 1.6866666666666666e-05, "loss": 1.0104, "step": 8714 }, { "epoch": 69.72, "grad_norm": 48.14217758178711, "learning_rate": 1.6862222222222224e-05, "loss": 0.6188, "step": 8715 }, { "epoch": 69.728, "grad_norm": 65.30294799804688, "learning_rate": 1.685777777777778e-05, "loss": 0.7986, "step": 8716 }, { "epoch": 69.736, "grad_norm": 18.619171142578125, "learning_rate": 1.6853333333333333e-05, "loss": 0.7597, "step": 8717 }, { "epoch": 
69.744, "grad_norm": 25.319643020629883, "learning_rate": 1.684888888888889e-05, "loss": 0.7627, "step": 8718 }, { "epoch": 69.752, "grad_norm": 15.387252807617188, "learning_rate": 1.6844444444444447e-05, "loss": 0.7475, "step": 8719 }, { "epoch": 69.76, "grad_norm": 39.433372497558594, "learning_rate": 1.684e-05, "loss": 1.1558, "step": 8720 }, { "epoch": 69.768, "grad_norm": 17.709535598754883, "learning_rate": 1.6835555555555556e-05, "loss": 1.0994, "step": 8721 }, { "epoch": 69.776, "grad_norm": 16.884790420532227, "learning_rate": 1.683111111111111e-05, "loss": 0.8683, "step": 8722 }, { "epoch": 69.784, "grad_norm": 22.756834030151367, "learning_rate": 1.682666666666667e-05, "loss": 0.958, "step": 8723 }, { "epoch": 69.792, "grad_norm": 23.098360061645508, "learning_rate": 1.6822222222222224e-05, "loss": 0.7291, "step": 8724 }, { "epoch": 69.8, "grad_norm": 53.524635314941406, "learning_rate": 1.681777777777778e-05, "loss": 0.7928, "step": 8725 }, { "epoch": 69.808, "grad_norm": 22.23763656616211, "learning_rate": 1.6813333333333334e-05, "loss": 1.1443, "step": 8726 }, { "epoch": 69.816, "grad_norm": 36.43678665161133, "learning_rate": 1.6808888888888892e-05, "loss": 0.9432, "step": 8727 }, { "epoch": 69.824, "grad_norm": 35.886268615722656, "learning_rate": 1.6804444444444444e-05, "loss": 0.707, "step": 8728 }, { "epoch": 69.832, "grad_norm": 21.5304012298584, "learning_rate": 1.6800000000000002e-05, "loss": 0.6386, "step": 8729 }, { "epoch": 69.84, "grad_norm": 20.506532669067383, "learning_rate": 1.6795555555555557e-05, "loss": 1.1312, "step": 8730 }, { "epoch": 69.848, "grad_norm": 40.36122131347656, "learning_rate": 1.679111111111111e-05, "loss": 0.9711, "step": 8731 }, { "epoch": 69.856, "grad_norm": 24.727815628051758, "learning_rate": 1.6786666666666666e-05, "loss": 0.8396, "step": 8732 }, { "epoch": 69.864, "grad_norm": 24.034238815307617, "learning_rate": 1.6782222222222225e-05, "loss": 1.1863, "step": 8733 }, { "epoch": 69.872, "grad_norm": 
19.500083923339844, "learning_rate": 1.677777777777778e-05, "loss": 0.844, "step": 8734 }, { "epoch": 69.88, "grad_norm": 17.24486541748047, "learning_rate": 1.6773333333333334e-05, "loss": 0.8604, "step": 8735 }, { "epoch": 69.888, "grad_norm": 15.407835006713867, "learning_rate": 1.676888888888889e-05, "loss": 0.9707, "step": 8736 }, { "epoch": 69.896, "grad_norm": 24.851821899414062, "learning_rate": 1.6764444444444447e-05, "loss": 0.9404, "step": 8737 }, { "epoch": 69.904, "grad_norm": 49.8315315246582, "learning_rate": 1.6760000000000002e-05, "loss": 0.7273, "step": 8738 }, { "epoch": 69.912, "grad_norm": 14.807206153869629, "learning_rate": 1.6755555555555557e-05, "loss": 0.6875, "step": 8739 }, { "epoch": 69.92, "grad_norm": 19.059139251708984, "learning_rate": 1.6751111111111112e-05, "loss": 0.646, "step": 8740 }, { "epoch": 69.928, "grad_norm": 15.509918212890625, "learning_rate": 1.674666666666667e-05, "loss": 1.0505, "step": 8741 }, { "epoch": 69.936, "grad_norm": 56.59029006958008, "learning_rate": 1.674222222222222e-05, "loss": 0.9877, "step": 8742 }, { "epoch": 69.944, "grad_norm": 27.830997467041016, "learning_rate": 1.6737777777777776e-05, "loss": 0.8637, "step": 8743 }, { "epoch": 69.952, "grad_norm": 56.83767318725586, "learning_rate": 1.6733333333333335e-05, "loss": 0.9182, "step": 8744 }, { "epoch": 69.96, "grad_norm": 28.884109497070312, "learning_rate": 1.672888888888889e-05, "loss": 1.0027, "step": 8745 }, { "epoch": 69.968, "grad_norm": 15.111788749694824, "learning_rate": 1.6724444444444444e-05, "loss": 1.0822, "step": 8746 }, { "epoch": 69.976, "grad_norm": 24.79206657409668, "learning_rate": 1.672e-05, "loss": 1.1981, "step": 8747 }, { "epoch": 69.984, "grad_norm": 13.116559982299805, "learning_rate": 1.6715555555555557e-05, "loss": 0.6386, "step": 8748 }, { "epoch": 69.992, "grad_norm": 13.90320873260498, "learning_rate": 1.6711111111111112e-05, "loss": 0.6469, "step": 8749 }, { "epoch": 70.0, "grad_norm": 41.040199279785156, 
"learning_rate": 1.6706666666666667e-05, "loss": 2.844, "step": 8750 }, { "epoch": 70.0, "eval_loss": 0.9956648349761963, "eval_map": 0.4418, "eval_map_50": 0.7765, "eval_map_75": 0.4468, "eval_map_Coverall": 0.6531, "eval_map_Face_Shield": 0.5262, "eval_map_Gloves": 0.3543, "eval_map_Goggles": 0.2358, "eval_map_Mask": 0.4396, "eval_map_large": 0.6581, "eval_map_medium": 0.3477, "eval_map_small": 0.3379, "eval_mar_1": 0.3414, "eval_mar_10": 0.5828, "eval_mar_100": 0.5944, "eval_mar_100_Coverall": 0.7556, "eval_mar_100_Face_Shield": 0.7059, "eval_mar_100_Gloves": 0.4984, "eval_mar_100_Goggles": 0.4969, "eval_mar_100_Mask": 0.5154, "eval_mar_large": 0.7849, "eval_mar_medium": 0.4864, "eval_mar_small": 0.4253, "eval_runtime": 0.9344, "eval_samples_per_second": 31.034, "eval_steps_per_second": 2.14, "step": 8750 }, { "epoch": 70.008, "grad_norm": 17.476036071777344, "learning_rate": 1.6702222222222222e-05, "loss": 0.781, "step": 8751 }, { "epoch": 70.016, "grad_norm": 17.814207077026367, "learning_rate": 1.669777777777778e-05, "loss": 0.9154, "step": 8752 }, { "epoch": 70.024, "grad_norm": 17.868724822998047, "learning_rate": 1.669333333333333e-05, "loss": 0.5227, "step": 8753 }, { "epoch": 70.032, "grad_norm": 14.792275428771973, "learning_rate": 1.668888888888889e-05, "loss": 0.7332, "step": 8754 }, { "epoch": 70.04, "grad_norm": 16.728403091430664, "learning_rate": 1.6684444444444445e-05, "loss": 0.8699, "step": 8755 }, { "epoch": 70.048, "grad_norm": 21.69182586669922, "learning_rate": 1.668e-05, "loss": 0.6844, "step": 8756 }, { "epoch": 70.056, "grad_norm": 42.245018005371094, "learning_rate": 1.6675555555555554e-05, "loss": 1.0905, "step": 8757 }, { "epoch": 70.064, "grad_norm": 20.613489151000977, "learning_rate": 1.6671111111111113e-05, "loss": 0.9264, "step": 8758 }, { "epoch": 70.072, "grad_norm": 21.779151916503906, "learning_rate": 1.6666666666666667e-05, "loss": 0.8124, "step": 8759 }, { "epoch": 70.08, "grad_norm": 17.575759887695312, "learning_rate": 
1.6662222222222222e-05, "loss": 0.9226, "step": 8760 }, { "epoch": 70.088, "grad_norm": 22.023757934570312, "learning_rate": 1.6657777777777777e-05, "loss": 0.845, "step": 8761 }, { "epoch": 70.096, "grad_norm": 23.425886154174805, "learning_rate": 1.6653333333333335e-05, "loss": 1.8725, "step": 8762 }, { "epoch": 70.104, "grad_norm": 30.961843490600586, "learning_rate": 1.664888888888889e-05, "loss": 0.8055, "step": 8763 }, { "epoch": 70.112, "grad_norm": 28.15999412536621, "learning_rate": 1.6644444444444445e-05, "loss": 0.9, "step": 8764 }, { "epoch": 70.12, "grad_norm": 30.79393196105957, "learning_rate": 1.664e-05, "loss": 0.8633, "step": 8765 }, { "epoch": 70.128, "grad_norm": 23.556673049926758, "learning_rate": 1.6635555555555558e-05, "loss": 0.6814, "step": 8766 }, { "epoch": 70.136, "grad_norm": 15.237748146057129, "learning_rate": 1.663111111111111e-05, "loss": 1.037, "step": 8767 }, { "epoch": 70.144, "grad_norm": 28.52939796447754, "learning_rate": 1.6626666666666668e-05, "loss": 1.2503, "step": 8768 }, { "epoch": 70.152, "grad_norm": 22.80520248413086, "learning_rate": 1.6622222222222223e-05, "loss": 1.2438, "step": 8769 }, { "epoch": 70.16, "grad_norm": 22.230274200439453, "learning_rate": 1.661777777777778e-05, "loss": 0.8594, "step": 8770 }, { "epoch": 70.168, "grad_norm": 23.556930541992188, "learning_rate": 1.6613333333333332e-05, "loss": 0.6745, "step": 8771 }, { "epoch": 70.176, "grad_norm": 27.68247413635254, "learning_rate": 1.660888888888889e-05, "loss": 0.7645, "step": 8772 }, { "epoch": 70.184, "grad_norm": 15.420609474182129, "learning_rate": 1.6604444444444445e-05, "loss": 0.6979, "step": 8773 }, { "epoch": 70.192, "grad_norm": 13.121918678283691, "learning_rate": 1.66e-05, "loss": 0.5529, "step": 8774 }, { "epoch": 70.2, "grad_norm": 116.67860412597656, "learning_rate": 1.6595555555555555e-05, "loss": 0.9649, "step": 8775 }, { "epoch": 70.208, "grad_norm": 18.623798370361328, "learning_rate": 1.6591111111111113e-05, "loss": 0.9477, 
"step": 8776 }, { "epoch": 70.216, "grad_norm": 29.694576263427734, "learning_rate": 1.6586666666666668e-05, "loss": 0.756, "step": 8777 }, { "epoch": 70.224, "grad_norm": 23.125274658203125, "learning_rate": 1.6582222222222223e-05, "loss": 0.8771, "step": 8778 }, { "epoch": 70.232, "grad_norm": 34.840965270996094, "learning_rate": 1.6577777777777778e-05, "loss": 0.8928, "step": 8779 }, { "epoch": 70.24, "grad_norm": 22.96202850341797, "learning_rate": 1.6573333333333336e-05, "loss": 1.0979, "step": 8780 }, { "epoch": 70.248, "grad_norm": 18.278682708740234, "learning_rate": 1.656888888888889e-05, "loss": 0.9699, "step": 8781 }, { "epoch": 70.256, "grad_norm": 17.817472457885742, "learning_rate": 1.6564444444444446e-05, "loss": 0.8167, "step": 8782 }, { "epoch": 70.264, "grad_norm": 21.27937126159668, "learning_rate": 1.656e-05, "loss": 0.76, "step": 8783 }, { "epoch": 70.272, "grad_norm": 25.645437240600586, "learning_rate": 1.655555555555556e-05, "loss": 0.9435, "step": 8784 }, { "epoch": 70.28, "grad_norm": 20.823720932006836, "learning_rate": 1.655111111111111e-05, "loss": 0.9063, "step": 8785 }, { "epoch": 70.288, "grad_norm": 28.78108787536621, "learning_rate": 1.654666666666667e-05, "loss": 0.924, "step": 8786 }, { "epoch": 70.296, "grad_norm": 18.070972442626953, "learning_rate": 1.6542222222222223e-05, "loss": 0.7945, "step": 8787 }, { "epoch": 70.304, "grad_norm": 14.810527801513672, "learning_rate": 1.6537777777777778e-05, "loss": 1.031, "step": 8788 }, { "epoch": 70.312, "grad_norm": 21.686580657958984, "learning_rate": 1.6533333333333333e-05, "loss": 0.8399, "step": 8789 }, { "epoch": 70.32, "grad_norm": 43.33088302612305, "learning_rate": 1.652888888888889e-05, "loss": 0.9279, "step": 8790 }, { "epoch": 70.328, "grad_norm": 29.0367488861084, "learning_rate": 1.6524444444444446e-05, "loss": 0.4485, "step": 8791 }, { "epoch": 70.336, "grad_norm": 12.405338287353516, "learning_rate": 1.652e-05, "loss": 0.8605, "step": 8792 }, { "epoch": 70.344, 
"grad_norm": 13.109465599060059, "learning_rate": 1.6515555555555556e-05, "loss": 0.8391, "step": 8793 }, { "epoch": 70.352, "grad_norm": 34.526634216308594, "learning_rate": 1.651111111111111e-05, "loss": 1.3082, "step": 8794 }, { "epoch": 70.36, "grad_norm": 20.35332679748535, "learning_rate": 1.650666666666667e-05, "loss": 1.1279, "step": 8795 }, { "epoch": 70.368, "grad_norm": 51.99948501586914, "learning_rate": 1.650222222222222e-05, "loss": 0.914, "step": 8796 }, { "epoch": 70.376, "grad_norm": 15.393938064575195, "learning_rate": 1.649777777777778e-05, "loss": 0.8686, "step": 8797 }, { "epoch": 70.384, "grad_norm": 27.313325881958008, "learning_rate": 1.6493333333333334e-05, "loss": 1.046, "step": 8798 }, { "epoch": 70.392, "grad_norm": 38.42367172241211, "learning_rate": 1.648888888888889e-05, "loss": 1.0948, "step": 8799 }, { "epoch": 70.4, "grad_norm": 42.546775817871094, "learning_rate": 1.6484444444444443e-05, "loss": 2.6372, "step": 8800 }, { "epoch": 70.408, "grad_norm": 24.756380081176758, "learning_rate": 1.648e-05, "loss": 1.2128, "step": 8801 }, { "epoch": 70.416, "grad_norm": 14.744026184082031, "learning_rate": 1.6475555555555556e-05, "loss": 1.1841, "step": 8802 }, { "epoch": 70.424, "grad_norm": 15.446562767028809, "learning_rate": 1.647111111111111e-05, "loss": 0.7395, "step": 8803 }, { "epoch": 70.432, "grad_norm": 36.41399383544922, "learning_rate": 1.6466666666666666e-05, "loss": 1.5513, "step": 8804 }, { "epoch": 70.44, "grad_norm": 21.111270904541016, "learning_rate": 1.6462222222222224e-05, "loss": 0.7968, "step": 8805 }, { "epoch": 70.448, "grad_norm": 29.491025924682617, "learning_rate": 1.645777777777778e-05, "loss": 0.8805, "step": 8806 }, { "epoch": 70.456, "grad_norm": 20.396099090576172, "learning_rate": 1.6453333333333334e-05, "loss": 0.9258, "step": 8807 }, { "epoch": 70.464, "grad_norm": 20.647336959838867, "learning_rate": 1.644888888888889e-05, "loss": 0.9827, "step": 8808 }, { "epoch": 70.472, "grad_norm": 
27.972257614135742, "learning_rate": 1.6444444444444447e-05, "loss": 0.4565, "step": 8809 }, { "epoch": 70.48, "grad_norm": 24.263628005981445, "learning_rate": 1.644e-05, "loss": 0.9627, "step": 8810 }, { "epoch": 70.488, "grad_norm": 24.38957405090332, "learning_rate": 1.6435555555555557e-05, "loss": 1.1334, "step": 8811 }, { "epoch": 70.496, "grad_norm": 29.45347785949707, "learning_rate": 1.643111111111111e-05, "loss": 0.9067, "step": 8812 }, { "epoch": 70.504, "grad_norm": 19.30915641784668, "learning_rate": 1.6426666666666666e-05, "loss": 0.6549, "step": 8813 }, { "epoch": 70.512, "grad_norm": 41.30106735229492, "learning_rate": 1.642222222222222e-05, "loss": 0.8615, "step": 8814 }, { "epoch": 70.52, "grad_norm": 15.730725288391113, "learning_rate": 1.641777777777778e-05, "loss": 0.7753, "step": 8815 }, { "epoch": 70.528, "grad_norm": 24.051116943359375, "learning_rate": 1.6413333333333334e-05, "loss": 1.0722, "step": 8816 }, { "epoch": 70.536, "grad_norm": 21.54140853881836, "learning_rate": 1.640888888888889e-05, "loss": 1.5964, "step": 8817 }, { "epoch": 70.544, "grad_norm": 79.9949722290039, "learning_rate": 1.6404444444444444e-05, "loss": 0.6599, "step": 8818 }, { "epoch": 70.552, "grad_norm": 25.873554229736328, "learning_rate": 1.6400000000000002e-05, "loss": 0.8558, "step": 8819 }, { "epoch": 70.56, "grad_norm": 30.96624755859375, "learning_rate": 1.6395555555555557e-05, "loss": 0.6477, "step": 8820 }, { "epoch": 70.568, "grad_norm": 25.79683494567871, "learning_rate": 1.6391111111111112e-05, "loss": 1.4932, "step": 8821 }, { "epoch": 70.576, "grad_norm": 36.9320068359375, "learning_rate": 1.6386666666666667e-05, "loss": 0.889, "step": 8822 }, { "epoch": 70.584, "grad_norm": 50.800899505615234, "learning_rate": 1.6382222222222225e-05, "loss": 0.8544, "step": 8823 }, { "epoch": 70.592, "grad_norm": 19.175594329833984, "learning_rate": 1.6377777777777776e-05, "loss": 0.9013, "step": 8824 }, { "epoch": 70.6, "grad_norm": 18.7724666595459, 
"learning_rate": 1.6373333333333335e-05, "loss": 0.8202, "step": 8825 }, { "epoch": 70.608, "grad_norm": 18.24538230895996, "learning_rate": 1.636888888888889e-05, "loss": 0.9467, "step": 8826 }, { "epoch": 70.616, "grad_norm": 24.570884704589844, "learning_rate": 1.6364444444444448e-05, "loss": 1.0574, "step": 8827 }, { "epoch": 70.624, "grad_norm": 23.96317481994629, "learning_rate": 1.636e-05, "loss": 1.0268, "step": 8828 }, { "epoch": 70.632, "grad_norm": 17.640241622924805, "learning_rate": 1.6355555555555557e-05, "loss": 0.842, "step": 8829 }, { "epoch": 70.64, "grad_norm": 20.630136489868164, "learning_rate": 1.6351111111111112e-05, "loss": 0.868, "step": 8830 }, { "epoch": 70.648, "grad_norm": 15.661962509155273, "learning_rate": 1.6346666666666667e-05, "loss": 0.8128, "step": 8831 }, { "epoch": 70.656, "grad_norm": 44.11431884765625, "learning_rate": 1.6342222222222222e-05, "loss": 2.502, "step": 8832 }, { "epoch": 70.664, "grad_norm": 38.68297576904297, "learning_rate": 1.633777777777778e-05, "loss": 0.6699, "step": 8833 }, { "epoch": 70.672, "grad_norm": 44.05916976928711, "learning_rate": 1.6333333333333335e-05, "loss": 0.81, "step": 8834 }, { "epoch": 70.68, "grad_norm": 33.34239196777344, "learning_rate": 1.632888888888889e-05, "loss": 0.991, "step": 8835 }, { "epoch": 70.688, "grad_norm": 20.426898956298828, "learning_rate": 1.6324444444444445e-05, "loss": 1.1831, "step": 8836 }, { "epoch": 70.696, "grad_norm": 16.341327667236328, "learning_rate": 1.6320000000000003e-05, "loss": 0.6096, "step": 8837 }, { "epoch": 70.704, "grad_norm": 29.12259864807129, "learning_rate": 1.6315555555555558e-05, "loss": 0.7646, "step": 8838 }, { "epoch": 70.712, "grad_norm": 24.763948440551758, "learning_rate": 1.6311111111111113e-05, "loss": 1.0552, "step": 8839 }, { "epoch": 70.72, "grad_norm": 23.128427505493164, "learning_rate": 1.6306666666666668e-05, "loss": 0.9491, "step": 8840 }, { "epoch": 70.728, "grad_norm": 14.168166160583496, "learning_rate": 
1.6302222222222226e-05, "loss": 0.6322, "step": 8841 }, { "epoch": 70.736, "grad_norm": 28.512598037719727, "learning_rate": 1.6297777777777777e-05, "loss": 0.9702, "step": 8842 }, { "epoch": 70.744, "grad_norm": 29.75005340576172, "learning_rate": 1.6293333333333335e-05, "loss": 1.1502, "step": 8843 }, { "epoch": 70.752, "grad_norm": 40.656463623046875, "learning_rate": 1.628888888888889e-05, "loss": 0.7726, "step": 8844 }, { "epoch": 70.76, "grad_norm": 34.2243537902832, "learning_rate": 1.6284444444444445e-05, "loss": 0.9795, "step": 8845 }, { "epoch": 70.768, "grad_norm": 34.92390060424805, "learning_rate": 1.628e-05, "loss": 0.8056, "step": 8846 }, { "epoch": 70.776, "grad_norm": 22.97707176208496, "learning_rate": 1.6275555555555555e-05, "loss": 0.7208, "step": 8847 }, { "epoch": 70.784, "grad_norm": 52.72904586791992, "learning_rate": 1.6271111111111113e-05, "loss": 1.2264, "step": 8848 }, { "epoch": 70.792, "grad_norm": 13.647126197814941, "learning_rate": 1.6266666666666665e-05, "loss": 1.3288, "step": 8849 }, { "epoch": 70.8, "grad_norm": 16.921646118164062, "learning_rate": 1.6262222222222223e-05, "loss": 0.7363, "step": 8850 }, { "epoch": 70.808, "grad_norm": 27.445924758911133, "learning_rate": 1.6257777777777778e-05, "loss": 0.7102, "step": 8851 }, { "epoch": 70.816, "grad_norm": 27.165719985961914, "learning_rate": 1.6253333333333336e-05, "loss": 0.9988, "step": 8852 }, { "epoch": 70.824, "grad_norm": 23.628442764282227, "learning_rate": 1.6248888888888887e-05, "loss": 0.9233, "step": 8853 }, { "epoch": 70.832, "grad_norm": 31.5395565032959, "learning_rate": 1.6244444444444446e-05, "loss": 1.0898, "step": 8854 }, { "epoch": 70.84, "grad_norm": 13.065552711486816, "learning_rate": 1.624e-05, "loss": 0.6515, "step": 8855 }, { "epoch": 70.848, "grad_norm": 26.640119552612305, "learning_rate": 1.6235555555555555e-05, "loss": 0.9635, "step": 8856 }, { "epoch": 70.856, "grad_norm": 16.33086395263672, "learning_rate": 1.623111111111111e-05, "loss": 1.1149, 
"step": 8857 }, { "epoch": 70.864, "grad_norm": 29.730632781982422, "learning_rate": 1.6226666666666668e-05, "loss": 1.3425, "step": 8858 }, { "epoch": 70.872, "grad_norm": 19.123720169067383, "learning_rate": 1.6222222222222223e-05, "loss": 0.9595, "step": 8859 }, { "epoch": 70.88, "grad_norm": 30.45176887512207, "learning_rate": 1.6217777777777778e-05, "loss": 1.0571, "step": 8860 }, { "epoch": 70.888, "grad_norm": 28.490140914916992, "learning_rate": 1.6213333333333333e-05, "loss": 1.0732, "step": 8861 }, { "epoch": 70.896, "grad_norm": 22.99467658996582, "learning_rate": 1.620888888888889e-05, "loss": 1.1007, "step": 8862 }, { "epoch": 70.904, "grad_norm": 24.40020179748535, "learning_rate": 1.6204444444444446e-05, "loss": 0.6387, "step": 8863 }, { "epoch": 70.912, "grad_norm": 22.68794059753418, "learning_rate": 1.62e-05, "loss": 1.1372, "step": 8864 }, { "epoch": 70.92, "grad_norm": 17.85133934020996, "learning_rate": 1.6195555555555556e-05, "loss": 1.1127, "step": 8865 }, { "epoch": 70.928, "grad_norm": 26.98098373413086, "learning_rate": 1.6191111111111114e-05, "loss": 0.811, "step": 8866 }, { "epoch": 70.936, "grad_norm": 24.050792694091797, "learning_rate": 1.6186666666666665e-05, "loss": 0.7376, "step": 8867 }, { "epoch": 70.944, "grad_norm": 18.630950927734375, "learning_rate": 1.6182222222222224e-05, "loss": 0.7572, "step": 8868 }, { "epoch": 70.952, "grad_norm": 17.518657684326172, "learning_rate": 1.617777777777778e-05, "loss": 0.7838, "step": 8869 }, { "epoch": 70.96, "grad_norm": 46.918540954589844, "learning_rate": 1.6173333333333333e-05, "loss": 1.2787, "step": 8870 }, { "epoch": 70.968, "grad_norm": 23.34897804260254, "learning_rate": 1.6168888888888888e-05, "loss": 0.8345, "step": 8871 }, { "epoch": 70.976, "grad_norm": 21.23937225341797, "learning_rate": 1.6164444444444446e-05, "loss": 0.9704, "step": 8872 }, { "epoch": 70.984, "grad_norm": 22.7259521484375, "learning_rate": 1.616e-05, "loss": 0.6901, "step": 8873 }, { "epoch": 70.992, 
"grad_norm": 46.40000915527344, "learning_rate": 1.6155555555555556e-05, "loss": 1.0791, "step": 8874 }, { "epoch": 71.0, "grad_norm": 19.486003875732422, "learning_rate": 1.615111111111111e-05, "loss": 1.149, "step": 8875 }, { "epoch": 71.0, "eval_loss": 0.9493652582168579, "eval_map": 0.4571, "eval_map_50": 0.8111, "eval_map_75": 0.4179, "eval_map_Coverall": 0.6384, "eval_map_Face_Shield": 0.5675, "eval_map_Gloves": 0.3862, "eval_map_Goggles": 0.2404, "eval_map_Mask": 0.453, "eval_map_large": 0.6475, "eval_map_medium": 0.3772, "eval_map_small": 0.3261, "eval_mar_1": 0.3503, "eval_mar_10": 0.5847, "eval_mar_100": 0.605, "eval_mar_100_Coverall": 0.7267, "eval_mar_100_Face_Shield": 0.7, "eval_mar_100_Gloves": 0.5082, "eval_mar_100_Goggles": 0.55, "eval_mar_100_Mask": 0.5404, "eval_mar_large": 0.7391, "eval_mar_medium": 0.5215, "eval_mar_small": 0.4363, "eval_runtime": 0.926, "eval_samples_per_second": 31.317, "eval_steps_per_second": 2.16, "step": 8875 }, { "epoch": 71.008, "grad_norm": 35.18003463745117, "learning_rate": 1.614666666666667e-05, "loss": 2.9908, "step": 8876 }, { "epoch": 71.016, "grad_norm": 27.296367645263672, "learning_rate": 1.6142222222222224e-05, "loss": 0.8807, "step": 8877 }, { "epoch": 71.024, "grad_norm": 17.848833084106445, "learning_rate": 1.613777777777778e-05, "loss": 0.7189, "step": 8878 }, { "epoch": 71.032, "grad_norm": 26.89948844909668, "learning_rate": 1.6133333333333334e-05, "loss": 1.294, "step": 8879 }, { "epoch": 71.04, "grad_norm": 22.542869567871094, "learning_rate": 1.6128888888888892e-05, "loss": 0.9001, "step": 8880 }, { "epoch": 71.048, "grad_norm": 31.132131576538086, "learning_rate": 1.6124444444444443e-05, "loss": 0.8249, "step": 8881 }, { "epoch": 71.056, "grad_norm": 28.00760841369629, "learning_rate": 1.612e-05, "loss": 0.6566, "step": 8882 }, { "epoch": 71.064, "grad_norm": 16.044981002807617, "learning_rate": 1.6115555555555556e-05, "loss": 0.7281, "step": 8883 }, { "epoch": 71.072, "grad_norm": 17.97842788696289, 
"learning_rate": 1.6111111111111115e-05, "loss": 0.8205, "step": 8884 }, { "epoch": 71.08, "grad_norm": 54.915035247802734, "learning_rate": 1.6106666666666666e-05, "loss": 0.9453, "step": 8885 }, { "epoch": 71.088, "grad_norm": 49.569583892822266, "learning_rate": 1.6102222222222224e-05, "loss": 1.0524, "step": 8886 }, { "epoch": 71.096, "grad_norm": 18.20779800415039, "learning_rate": 1.609777777777778e-05, "loss": 0.6182, "step": 8887 }, { "epoch": 71.104, "grad_norm": 40.317588806152344, "learning_rate": 1.6093333333333334e-05, "loss": 0.8283, "step": 8888 }, { "epoch": 71.112, "grad_norm": 17.6838436126709, "learning_rate": 1.608888888888889e-05, "loss": 1.0067, "step": 8889 }, { "epoch": 71.12, "grad_norm": 39.84739685058594, "learning_rate": 1.6084444444444447e-05, "loss": 1.0579, "step": 8890 }, { "epoch": 71.128, "grad_norm": 17.557579040527344, "learning_rate": 1.6080000000000002e-05, "loss": 0.6235, "step": 8891 }, { "epoch": 71.136, "grad_norm": 31.99443817138672, "learning_rate": 1.6075555555555557e-05, "loss": 1.0052, "step": 8892 }, { "epoch": 71.144, "grad_norm": 45.463016510009766, "learning_rate": 1.607111111111111e-05, "loss": 0.8535, "step": 8893 }, { "epoch": 71.152, "grad_norm": 33.004188537597656, "learning_rate": 1.606666666666667e-05, "loss": 1.4899, "step": 8894 }, { "epoch": 71.16, "grad_norm": 32.60430145263672, "learning_rate": 1.606222222222222e-05, "loss": 0.8855, "step": 8895 }, { "epoch": 71.168, "grad_norm": 27.721590042114258, "learning_rate": 1.605777777777778e-05, "loss": 0.967, "step": 8896 }, { "epoch": 71.176, "grad_norm": 20.358165740966797, "learning_rate": 1.6053333333333334e-05, "loss": 1.1079, "step": 8897 }, { "epoch": 71.184, "grad_norm": 85.49781799316406, "learning_rate": 1.604888888888889e-05, "loss": 1.7848, "step": 8898 }, { "epoch": 71.192, "grad_norm": 21.75218391418457, "learning_rate": 1.6044444444444444e-05, "loss": 0.7535, "step": 8899 }, { "epoch": 71.2, "grad_norm": 22.045970916748047, "learning_rate": 
1.604e-05, "loss": 1.0172, "step": 8900 }, { "epoch": 71.208, "grad_norm": 34.88746643066406, "learning_rate": 1.6035555555555557e-05, "loss": 0.691, "step": 8901 }, { "epoch": 71.216, "grad_norm": 22.119068145751953, "learning_rate": 1.6031111111111112e-05, "loss": 1.1185, "step": 8902 }, { "epoch": 71.224, "grad_norm": 27.037872314453125, "learning_rate": 1.6026666666666667e-05, "loss": 0.8614, "step": 8903 }, { "epoch": 71.232, "grad_norm": 19.235984802246094, "learning_rate": 1.602222222222222e-05, "loss": 0.9722, "step": 8904 }, { "epoch": 71.24, "grad_norm": 16.42098045349121, "learning_rate": 1.601777777777778e-05, "loss": 0.703, "step": 8905 }, { "epoch": 71.248, "grad_norm": 42.503150939941406, "learning_rate": 1.601333333333333e-05, "loss": 2.1778, "step": 8906 }, { "epoch": 71.256, "grad_norm": 34.781707763671875, "learning_rate": 1.600888888888889e-05, "loss": 1.0709, "step": 8907 }, { "epoch": 71.264, "grad_norm": 18.43975830078125, "learning_rate": 1.6004444444444444e-05, "loss": 0.9424, "step": 8908 }, { "epoch": 71.272, "grad_norm": 23.333297729492188, "learning_rate": 1.6000000000000003e-05, "loss": 1.8895, "step": 8909 }, { "epoch": 71.28, "grad_norm": 46.47373962402344, "learning_rate": 1.5995555555555554e-05, "loss": 1.03, "step": 8910 }, { "epoch": 71.288, "grad_norm": 18.05512237548828, "learning_rate": 1.5991111111111112e-05, "loss": 0.8718, "step": 8911 }, { "epoch": 71.296, "grad_norm": 14.102361679077148, "learning_rate": 1.5986666666666667e-05, "loss": 1.1042, "step": 8912 }, { "epoch": 71.304, "grad_norm": 28.47450828552246, "learning_rate": 1.5982222222222222e-05, "loss": 1.2065, "step": 8913 }, { "epoch": 71.312, "grad_norm": 54.34135437011719, "learning_rate": 1.5977777777777777e-05, "loss": 0.7261, "step": 8914 }, { "epoch": 71.32, "grad_norm": 21.916044235229492, "learning_rate": 1.5973333333333335e-05, "loss": 1.1141, "step": 8915 }, { "epoch": 71.328, "grad_norm": 28.23065757751465, "learning_rate": 1.596888888888889e-05, "loss": 
0.5033, "step": 8916 }, { "epoch": 71.336, "grad_norm": 37.32564926147461, "learning_rate": 1.5964444444444445e-05, "loss": 0.9038, "step": 8917 }, { "epoch": 71.344, "grad_norm": 63.164283752441406, "learning_rate": 1.596e-05, "loss": 1.8287, "step": 8918 }, { "epoch": 71.352, "grad_norm": 13.803382873535156, "learning_rate": 1.5955555555555558e-05, "loss": 0.6812, "step": 8919 }, { "epoch": 71.36, "grad_norm": 20.8936824798584, "learning_rate": 1.5951111111111113e-05, "loss": 0.8433, "step": 8920 }, { "epoch": 71.368, "grad_norm": 32.084598541259766, "learning_rate": 1.5946666666666668e-05, "loss": 0.6004, "step": 8921 }, { "epoch": 71.376, "grad_norm": 30.32988739013672, "learning_rate": 1.5942222222222222e-05, "loss": 0.6878, "step": 8922 }, { "epoch": 71.384, "grad_norm": 16.611825942993164, "learning_rate": 1.593777777777778e-05, "loss": 0.8033, "step": 8923 }, { "epoch": 71.392, "grad_norm": 76.76895904541016, "learning_rate": 1.5933333333333332e-05, "loss": 0.7016, "step": 8924 }, { "epoch": 71.4, "grad_norm": 22.049880981445312, "learning_rate": 1.592888888888889e-05, "loss": 1.1324, "step": 8925 }, { "epoch": 71.408, "grad_norm": 20.60059356689453, "learning_rate": 1.5924444444444445e-05, "loss": 1.0381, "step": 8926 }, { "epoch": 71.416, "grad_norm": 18.70222282409668, "learning_rate": 1.592e-05, "loss": 0.7176, "step": 8927 }, { "epoch": 71.424, "grad_norm": 20.022235870361328, "learning_rate": 1.5915555555555555e-05, "loss": 0.5908, "step": 8928 }, { "epoch": 71.432, "grad_norm": 30.593839645385742, "learning_rate": 1.5911111111111113e-05, "loss": 0.83, "step": 8929 }, { "epoch": 71.44, "grad_norm": 64.5436782836914, "learning_rate": 1.5906666666666668e-05, "loss": 1.5873, "step": 8930 }, { "epoch": 71.448, "grad_norm": 32.901145935058594, "learning_rate": 1.5902222222222223e-05, "loss": 0.8071, "step": 8931 }, { "epoch": 71.456, "grad_norm": 27.622865676879883, "learning_rate": 1.5897777777777778e-05, "loss": 0.9113, "step": 8932 }, { "epoch": 71.464, 
"grad_norm": 22.41153907775879, "learning_rate": 1.5893333333333336e-05, "loss": 0.9307, "step": 8933 }, { "epoch": 71.472, "grad_norm": 16.289804458618164, "learning_rate": 1.588888888888889e-05, "loss": 1.0623, "step": 8934 }, { "epoch": 71.48, "grad_norm": 47.1858024597168, "learning_rate": 1.5884444444444446e-05, "loss": 0.7935, "step": 8935 }, { "epoch": 71.488, "grad_norm": 34.076873779296875, "learning_rate": 1.588e-05, "loss": 0.7534, "step": 8936 }, { "epoch": 71.496, "grad_norm": 29.997678756713867, "learning_rate": 1.587555555555556e-05, "loss": 0.8027, "step": 8937 }, { "epoch": 71.504, "grad_norm": 30.003177642822266, "learning_rate": 1.587111111111111e-05, "loss": 0.9808, "step": 8938 }, { "epoch": 71.512, "grad_norm": 25.018354415893555, "learning_rate": 1.586666666666667e-05, "loss": 1.1915, "step": 8939 }, { "epoch": 71.52, "grad_norm": 22.65094566345215, "learning_rate": 1.5862222222222223e-05, "loss": 0.8151, "step": 8940 }, { "epoch": 71.528, "grad_norm": 34.75142288208008, "learning_rate": 1.5857777777777778e-05, "loss": 0.9107, "step": 8941 }, { "epoch": 71.536, "grad_norm": 34.245338439941406, "learning_rate": 1.5853333333333333e-05, "loss": 0.7603, "step": 8942 }, { "epoch": 71.544, "grad_norm": 70.24748229980469, "learning_rate": 1.584888888888889e-05, "loss": 0.7869, "step": 8943 }, { "epoch": 71.552, "grad_norm": 17.695470809936523, "learning_rate": 1.5844444444444446e-05, "loss": 0.6717, "step": 8944 }, { "epoch": 71.56, "grad_norm": 29.08383560180664, "learning_rate": 1.584e-05, "loss": 0.5096, "step": 8945 }, { "epoch": 71.568, "grad_norm": 28.000423431396484, "learning_rate": 1.5835555555555556e-05, "loss": 0.8836, "step": 8946 }, { "epoch": 71.576, "grad_norm": 18.40597152709961, "learning_rate": 1.5831111111111114e-05, "loss": 0.9221, "step": 8947 }, { "epoch": 71.584, "grad_norm": 27.07490348815918, "learning_rate": 1.582666666666667e-05, "loss": 0.9857, "step": 8948 }, { "epoch": 71.592, "grad_norm": 141.1805877685547, 
"learning_rate": 1.582222222222222e-05, "loss": 0.7825, "step": 8949 }, { "epoch": 71.6, "grad_norm": 20.502412796020508, "learning_rate": 1.581777777777778e-05, "loss": 0.9298, "step": 8950 }, { "epoch": 71.608, "grad_norm": 34.46352767944336, "learning_rate": 1.5813333333333333e-05, "loss": 0.963, "step": 8951 }, { "epoch": 71.616, "grad_norm": 21.91301918029785, "learning_rate": 1.5808888888888888e-05, "loss": 1.0562, "step": 8952 }, { "epoch": 71.624, "grad_norm": 35.58258056640625, "learning_rate": 1.5804444444444443e-05, "loss": 1.2537, "step": 8953 }, { "epoch": 71.632, "grad_norm": 16.53951644897461, "learning_rate": 1.58e-05, "loss": 0.6647, "step": 8954 }, { "epoch": 71.64, "grad_norm": 22.770206451416016, "learning_rate": 1.5795555555555556e-05, "loss": 1.3535, "step": 8955 }, { "epoch": 71.648, "grad_norm": 23.727903366088867, "learning_rate": 1.579111111111111e-05, "loss": 0.9066, "step": 8956 }, { "epoch": 71.656, "grad_norm": 24.844697952270508, "learning_rate": 1.5786666666666666e-05, "loss": 0.8815, "step": 8957 }, { "epoch": 71.664, "grad_norm": 61.48876953125, "learning_rate": 1.5782222222222224e-05, "loss": 1.1901, "step": 8958 }, { "epoch": 71.672, "grad_norm": 18.6036376953125, "learning_rate": 1.577777777777778e-05, "loss": 0.8062, "step": 8959 }, { "epoch": 71.68, "grad_norm": 23.209592819213867, "learning_rate": 1.5773333333333334e-05, "loss": 0.6982, "step": 8960 }, { "epoch": 71.688, "grad_norm": 26.379043579101562, "learning_rate": 1.576888888888889e-05, "loss": 0.6439, "step": 8961 }, { "epoch": 71.696, "grad_norm": 33.758724212646484, "learning_rate": 1.5764444444444447e-05, "loss": 1.0189, "step": 8962 }, { "epoch": 71.704, "grad_norm": 16.741819381713867, "learning_rate": 1.5759999999999998e-05, "loss": 1.0018, "step": 8963 }, { "epoch": 71.712, "grad_norm": 20.724956512451172, "learning_rate": 1.5755555555555556e-05, "loss": 0.6244, "step": 8964 }, { "epoch": 71.72, "grad_norm": 15.561535835266113, "learning_rate": 
1.575111111111111e-05, "loss": 0.9556, "step": 8965 }, { "epoch": 71.728, "grad_norm": 21.55799674987793, "learning_rate": 1.574666666666667e-05, "loss": 0.8554, "step": 8966 }, { "epoch": 71.736, "grad_norm": 22.12864875793457, "learning_rate": 1.574222222222222e-05, "loss": 1.1918, "step": 8967 }, { "epoch": 71.744, "grad_norm": 76.6997299194336, "learning_rate": 1.573777777777778e-05, "loss": 0.7617, "step": 8968 }, { "epoch": 71.752, "grad_norm": 20.478017807006836, "learning_rate": 1.5733333333333334e-05, "loss": 0.8025, "step": 8969 }, { "epoch": 71.76, "grad_norm": 11.987592697143555, "learning_rate": 1.572888888888889e-05, "loss": 1.199, "step": 8970 }, { "epoch": 71.768, "grad_norm": 19.903793334960938, "learning_rate": 1.5724444444444444e-05, "loss": 0.7675, "step": 8971 }, { "epoch": 71.776, "grad_norm": 34.99492645263672, "learning_rate": 1.5720000000000002e-05, "loss": 0.8641, "step": 8972 }, { "epoch": 71.784, "grad_norm": 45.095130920410156, "learning_rate": 1.5715555555555557e-05, "loss": 1.1034, "step": 8973 }, { "epoch": 71.792, "grad_norm": 21.64255714416504, "learning_rate": 1.571111111111111e-05, "loss": 0.7038, "step": 8974 }, { "epoch": 71.8, "grad_norm": 47.79658508300781, "learning_rate": 1.5706666666666666e-05, "loss": 0.9565, "step": 8975 }, { "epoch": 71.808, "grad_norm": 18.696216583251953, "learning_rate": 1.5702222222222225e-05, "loss": 0.7867, "step": 8976 }, { "epoch": 71.816, "grad_norm": 43.33997344970703, "learning_rate": 1.569777777777778e-05, "loss": 0.9538, "step": 8977 }, { "epoch": 71.824, "grad_norm": 15.327493667602539, "learning_rate": 1.5693333333333334e-05, "loss": 1.1815, "step": 8978 }, { "epoch": 71.832, "grad_norm": 23.51276969909668, "learning_rate": 1.568888888888889e-05, "loss": 0.7525, "step": 8979 }, { "epoch": 71.84, "grad_norm": 21.000629425048828, "learning_rate": 1.5684444444444447e-05, "loss": 0.6549, "step": 8980 }, { "epoch": 71.848, "grad_norm": 19.19567108154297, "learning_rate": 1.568e-05, "loss": 
0.9083, "step": 8981 }, { "epoch": 71.856, "grad_norm": 19.32100486755371, "learning_rate": 1.5675555555555557e-05, "loss": 1.0041, "step": 8982 }, { "epoch": 71.864, "grad_norm": 32.6912841796875, "learning_rate": 1.5671111111111112e-05, "loss": 0.7476, "step": 8983 }, { "epoch": 71.872, "grad_norm": 20.60258674621582, "learning_rate": 1.5666666666666667e-05, "loss": 1.0555, "step": 8984 }, { "epoch": 71.88, "grad_norm": 19.65373420715332, "learning_rate": 1.5662222222222222e-05, "loss": 0.8883, "step": 8985 }, { "epoch": 71.888, "grad_norm": 27.23861312866211, "learning_rate": 1.565777777777778e-05, "loss": 0.4801, "step": 8986 }, { "epoch": 71.896, "grad_norm": 20.93113136291504, "learning_rate": 1.5653333333333335e-05, "loss": 0.7139, "step": 8987 }, { "epoch": 71.904, "grad_norm": 28.058488845825195, "learning_rate": 1.564888888888889e-05, "loss": 1.5304, "step": 8988 }, { "epoch": 71.912, "grad_norm": 19.273866653442383, "learning_rate": 1.5644444444444444e-05, "loss": 0.6283, "step": 8989 }, { "epoch": 71.92, "grad_norm": 40.24531173706055, "learning_rate": 1.5640000000000003e-05, "loss": 0.9674, "step": 8990 }, { "epoch": 71.928, "grad_norm": 23.052392959594727, "learning_rate": 1.5635555555555558e-05, "loss": 0.7836, "step": 8991 }, { "epoch": 71.936, "grad_norm": 19.545196533203125, "learning_rate": 1.5631111111111112e-05, "loss": 0.5714, "step": 8992 }, { "epoch": 71.944, "grad_norm": 25.040477752685547, "learning_rate": 1.5626666666666667e-05, "loss": 1.0297, "step": 8993 }, { "epoch": 71.952, "grad_norm": 177.88815307617188, "learning_rate": 1.5622222222222225e-05, "loss": 1.0591, "step": 8994 }, { "epoch": 71.96, "grad_norm": 27.359037399291992, "learning_rate": 1.5617777777777777e-05, "loss": 0.8164, "step": 8995 }, { "epoch": 71.968, "grad_norm": 34.09805679321289, "learning_rate": 1.5613333333333335e-05, "loss": 0.9224, "step": 8996 }, { "epoch": 71.976, "grad_norm": 25.07474708557129, "learning_rate": 1.560888888888889e-05, "loss": 0.9215, "step": 
8997 }, { "epoch": 71.984, "grad_norm": 24.144880294799805, "learning_rate": 1.5604444444444445e-05, "loss": 1.1679, "step": 8998 }, { "epoch": 71.992, "grad_norm": 108.19041442871094, "learning_rate": 1.56e-05, "loss": 0.7774, "step": 8999 }, { "epoch": 72.0, "grad_norm": 14.56849193572998, "learning_rate": 1.5595555555555555e-05, "loss": 0.8077, "step": 9000 }, { "epoch": 72.0, "eval_loss": 0.9655598402023315, "eval_map": 0.4394, "eval_map_50": 0.803, "eval_map_75": 0.4259, "eval_map_Coverall": 0.6534, "eval_map_Face_Shield": 0.5195, "eval_map_Gloves": 0.3666, "eval_map_Goggles": 0.2334, "eval_map_Mask": 0.424, "eval_map_large": 0.6313, "eval_map_medium": 0.3581, "eval_map_small": 0.3559, "eval_mar_1": 0.3335, "eval_mar_10": 0.5841, "eval_mar_100": 0.5963, "eval_mar_100_Coverall": 0.7244, "eval_mar_100_Face_Shield": 0.7294, "eval_mar_100_Gloves": 0.5, "eval_mar_100_Goggles": 0.4969, "eval_mar_100_Mask": 0.5308, "eval_mar_large": 0.7389, "eval_mar_medium": 0.5021, "eval_mar_small": 0.4525, "eval_runtime": 0.9269, "eval_samples_per_second": 31.286, "eval_steps_per_second": 2.158, "step": 9000 }, { "epoch": 72.008, "grad_norm": 14.786879539489746, "learning_rate": 1.5591111111111113e-05, "loss": 0.6447, "step": 9001 }, { "epoch": 72.016, "grad_norm": 23.685302734375, "learning_rate": 1.5586666666666668e-05, "loss": 0.9522, "step": 9002 }, { "epoch": 72.024, "grad_norm": 43.622406005859375, "learning_rate": 1.5582222222222222e-05, "loss": 0.6681, "step": 9003 }, { "epoch": 72.032, "grad_norm": 13.419453620910645, "learning_rate": 1.5577777777777777e-05, "loss": 1.0097, "step": 9004 }, { "epoch": 72.04, "grad_norm": 15.6317720413208, "learning_rate": 1.5573333333333336e-05, "loss": 0.5289, "step": 9005 }, { "epoch": 72.048, "grad_norm": 15.639211654663086, "learning_rate": 1.5568888888888887e-05, "loss": 0.9366, "step": 9006 }, { "epoch": 72.056, "grad_norm": 41.01308059692383, "learning_rate": 1.5564444444444445e-05, "loss": 0.7761, "step": 9007 }, { "epoch": 72.064, 
"grad_norm": 33.234920501708984, "learning_rate": 1.556e-05, "loss": 0.5489, "step": 9008 }, { "epoch": 72.072, "grad_norm": 13.603095054626465, "learning_rate": 1.5555555555555555e-05, "loss": 0.8701, "step": 9009 }, { "epoch": 72.08, "grad_norm": 36.21076965332031, "learning_rate": 1.555111111111111e-05, "loss": 0.8996, "step": 9010 }, { "epoch": 72.088, "grad_norm": 29.697837829589844, "learning_rate": 1.5546666666666668e-05, "loss": 0.8488, "step": 9011 }, { "epoch": 72.096, "grad_norm": 28.049358367919922, "learning_rate": 1.5542222222222223e-05, "loss": 0.7386, "step": 9012 }, { "epoch": 72.104, "grad_norm": 41.58122253417969, "learning_rate": 1.5537777777777778e-05, "loss": 0.9898, "step": 9013 }, { "epoch": 72.112, "grad_norm": 27.56369400024414, "learning_rate": 1.5533333333333333e-05, "loss": 0.7769, "step": 9014 }, { "epoch": 72.12, "grad_norm": 16.96443748474121, "learning_rate": 1.552888888888889e-05, "loss": 1.0181, "step": 9015 }, { "epoch": 72.128, "grad_norm": 53.938838958740234, "learning_rate": 1.5524444444444446e-05, "loss": 0.7669, "step": 9016 }, { "epoch": 72.136, "grad_norm": 23.308732986450195, "learning_rate": 1.552e-05, "loss": 0.709, "step": 9017 }, { "epoch": 72.144, "grad_norm": 31.347410202026367, "learning_rate": 1.5515555555555555e-05, "loss": 1.0564, "step": 9018 }, { "epoch": 72.152, "grad_norm": 15.95107650756836, "learning_rate": 1.5511111111111114e-05, "loss": 1.2352, "step": 9019 }, { "epoch": 72.16, "grad_norm": 16.663436889648438, "learning_rate": 1.5506666666666665e-05, "loss": 0.763, "step": 9020 }, { "epoch": 72.168, "grad_norm": 37.25482177734375, "learning_rate": 1.5502222222222223e-05, "loss": 1.0745, "step": 9021 }, { "epoch": 72.176, "grad_norm": 17.962173461914062, "learning_rate": 1.5497777777777778e-05, "loss": 0.6364, "step": 9022 }, { "epoch": 72.184, "grad_norm": 21.607351303100586, "learning_rate": 1.5493333333333336e-05, "loss": 0.7785, "step": 9023 }, { "epoch": 72.192, "grad_norm": 16.711116790771484, 
"learning_rate": 1.5488888888888888e-05, "loss": 0.9032, "step": 9024 }, { "epoch": 72.2, "grad_norm": 20.386432647705078, "learning_rate": 1.5484444444444446e-05, "loss": 0.7571, "step": 9025 }, { "epoch": 72.208, "grad_norm": 23.196609497070312, "learning_rate": 1.548e-05, "loss": 0.9086, "step": 9026 }, { "epoch": 72.216, "grad_norm": 15.142984390258789, "learning_rate": 1.5475555555555556e-05, "loss": 0.7197, "step": 9027 }, { "epoch": 72.224, "grad_norm": 40.00871276855469, "learning_rate": 1.547111111111111e-05, "loss": 2.2546, "step": 9028 }, { "epoch": 72.232, "grad_norm": 25.616674423217773, "learning_rate": 1.546666666666667e-05, "loss": 0.7399, "step": 9029 }, { "epoch": 72.24, "grad_norm": 24.374311447143555, "learning_rate": 1.5462222222222224e-05, "loss": 0.9818, "step": 9030 }, { "epoch": 72.248, "grad_norm": 34.38438415527344, "learning_rate": 1.545777777777778e-05, "loss": 0.7722, "step": 9031 }, { "epoch": 72.256, "grad_norm": 22.968172073364258, "learning_rate": 1.5453333333333333e-05, "loss": 0.6361, "step": 9032 }, { "epoch": 72.264, "grad_norm": 15.733457565307617, "learning_rate": 1.544888888888889e-05, "loss": 0.7827, "step": 9033 }, { "epoch": 72.272, "grad_norm": 42.64008712768555, "learning_rate": 1.5444444444444446e-05, "loss": 1.7133, "step": 9034 }, { "epoch": 72.28, "grad_norm": 30.384674072265625, "learning_rate": 1.544e-05, "loss": 1.8703, "step": 9035 }, { "epoch": 72.288, "grad_norm": 61.34203338623047, "learning_rate": 1.5435555555555556e-05, "loss": 0.5358, "step": 9036 }, { "epoch": 72.296, "grad_norm": 28.5566349029541, "learning_rate": 1.5431111111111114e-05, "loss": 0.9177, "step": 9037 }, { "epoch": 72.304, "grad_norm": 26.856538772583008, "learning_rate": 1.5426666666666666e-05, "loss": 0.7574, "step": 9038 }, { "epoch": 72.312, "grad_norm": 38.8994140625, "learning_rate": 1.5422222222222224e-05, "loss": 0.9246, "step": 9039 }, { "epoch": 72.32, "grad_norm": 30.266725540161133, "learning_rate": 1.541777777777778e-05, 
"loss": 0.8784, "step": 9040 }, { "epoch": 72.328, "grad_norm": 19.651824951171875, "learning_rate": 1.5413333333333334e-05, "loss": 0.8116, "step": 9041 }, { "epoch": 72.336, "grad_norm": 25.43199920654297, "learning_rate": 1.540888888888889e-05, "loss": 0.8093, "step": 9042 }, { "epoch": 72.344, "grad_norm": 28.940160751342773, "learning_rate": 1.5404444444444447e-05, "loss": 0.6046, "step": 9043 }, { "epoch": 72.352, "grad_norm": 21.26909065246582, "learning_rate": 1.54e-05, "loss": 0.7985, "step": 9044 }, { "epoch": 72.36, "grad_norm": 29.68324089050293, "learning_rate": 1.5395555555555556e-05, "loss": 0.8899, "step": 9045 }, { "epoch": 72.368, "grad_norm": 32.01289367675781, "learning_rate": 1.539111111111111e-05, "loss": 0.8841, "step": 9046 }, { "epoch": 72.376, "grad_norm": 17.064477920532227, "learning_rate": 1.538666666666667e-05, "loss": 1.0371, "step": 9047 }, { "epoch": 72.384, "grad_norm": 43.192840576171875, "learning_rate": 1.5382222222222224e-05, "loss": 0.9185, "step": 9048 }, { "epoch": 72.392, "grad_norm": 19.78449249267578, "learning_rate": 1.537777777777778e-05, "loss": 0.9879, "step": 9049 }, { "epoch": 72.4, "grad_norm": 46.528648376464844, "learning_rate": 1.5373333333333334e-05, "loss": 0.8171, "step": 9050 }, { "epoch": 72.408, "grad_norm": 31.167728424072266, "learning_rate": 1.5368888888888892e-05, "loss": 0.8247, "step": 9051 }, { "epoch": 72.416, "grad_norm": 76.75357055664062, "learning_rate": 1.5364444444444444e-05, "loss": 1.7666, "step": 9052 }, { "epoch": 72.424, "grad_norm": 24.0643367767334, "learning_rate": 1.536e-05, "loss": 0.6786, "step": 9053 }, { "epoch": 72.432, "grad_norm": 21.955482482910156, "learning_rate": 1.5355555555555557e-05, "loss": 0.7393, "step": 9054 }, { "epoch": 72.44, "grad_norm": 17.113906860351562, "learning_rate": 1.5351111111111112e-05, "loss": 0.8461, "step": 9055 }, { "epoch": 72.448, "grad_norm": 30.982563018798828, "learning_rate": 1.5346666666666667e-05, "loss": 0.901, "step": 9056 }, { "epoch": 
72.456, "grad_norm": 32.70757293701172, "learning_rate": 1.534222222222222e-05, "loss": 1.1845, "step": 9057 }, { "epoch": 72.464, "grad_norm": 63.13407897949219, "learning_rate": 1.533777777777778e-05, "loss": 2.5931, "step": 9058 }, { "epoch": 72.472, "grad_norm": 53.44038391113281, "learning_rate": 1.5333333333333334e-05, "loss": 0.9553, "step": 9059 }, { "epoch": 72.48, "grad_norm": 20.32192611694336, "learning_rate": 1.532888888888889e-05, "loss": 1.3468, "step": 9060 }, { "epoch": 72.488, "grad_norm": 29.51474952697754, "learning_rate": 1.5324444444444444e-05, "loss": 0.7334, "step": 9061 }, { "epoch": 72.496, "grad_norm": 14.538850784301758, "learning_rate": 1.5320000000000002e-05, "loss": 0.9914, "step": 9062 }, { "epoch": 72.504, "grad_norm": 39.19458770751953, "learning_rate": 1.5315555555555554e-05, "loss": 0.9207, "step": 9063 }, { "epoch": 72.512, "grad_norm": 20.433637619018555, "learning_rate": 1.5311111111111112e-05, "loss": 0.6668, "step": 9064 }, { "epoch": 72.52, "grad_norm": 51.22861099243164, "learning_rate": 1.5306666666666667e-05, "loss": 1.1924, "step": 9065 }, { "epoch": 72.528, "grad_norm": 25.75701332092285, "learning_rate": 1.5302222222222222e-05, "loss": 0.6519, "step": 9066 }, { "epoch": 72.536, "grad_norm": 19.214563369750977, "learning_rate": 1.5297777777777777e-05, "loss": 0.8482, "step": 9067 }, { "epoch": 72.544, "grad_norm": 27.86107063293457, "learning_rate": 1.5293333333333335e-05, "loss": 0.6977, "step": 9068 }, { "epoch": 72.552, "grad_norm": 29.151065826416016, "learning_rate": 1.528888888888889e-05, "loss": 1.0451, "step": 9069 }, { "epoch": 72.56, "grad_norm": 18.989688873291016, "learning_rate": 1.5284444444444445e-05, "loss": 1.2466, "step": 9070 }, { "epoch": 72.568, "grad_norm": 33.98352813720703, "learning_rate": 1.528e-05, "loss": 0.9421, "step": 9071 }, { "epoch": 72.576, "grad_norm": 24.34566879272461, "learning_rate": 1.5275555555555558e-05, "loss": 0.722, "step": 9072 }, { "epoch": 72.584, "grad_norm": 
31.7718448638916, "learning_rate": 1.5271111111111112e-05, "loss": 0.989, "step": 9073 }, { "epoch": 72.592, "grad_norm": 28.199317932128906, "learning_rate": 1.5266666666666667e-05, "loss": 0.7065, "step": 9074 }, { "epoch": 72.6, "grad_norm": 128.5464324951172, "learning_rate": 1.5262222222222222e-05, "loss": 0.6613, "step": 9075 }, { "epoch": 72.608, "grad_norm": 16.306987762451172, "learning_rate": 1.5257777777777779e-05, "loss": 0.9067, "step": 9076 }, { "epoch": 72.616, "grad_norm": 40.40427017211914, "learning_rate": 1.5253333333333334e-05, "loss": 0.7807, "step": 9077 }, { "epoch": 72.624, "grad_norm": 33.06645965576172, "learning_rate": 1.524888888888889e-05, "loss": 0.8537, "step": 9078 }, { "epoch": 72.632, "grad_norm": 62.49217987060547, "learning_rate": 1.5244444444444445e-05, "loss": 0.9107, "step": 9079 }, { "epoch": 72.64, "grad_norm": 14.557249069213867, "learning_rate": 1.5240000000000001e-05, "loss": 0.9692, "step": 9080 }, { "epoch": 72.648, "grad_norm": 14.63774299621582, "learning_rate": 1.5235555555555556e-05, "loss": 0.9872, "step": 9081 }, { "epoch": 72.656, "grad_norm": 29.029521942138672, "learning_rate": 1.5231111111111113e-05, "loss": 0.9924, "step": 9082 }, { "epoch": 72.664, "grad_norm": 32.086151123046875, "learning_rate": 1.5226666666666668e-05, "loss": 0.6059, "step": 9083 }, { "epoch": 72.672, "grad_norm": 75.06587219238281, "learning_rate": 1.5222222222222224e-05, "loss": 0.7683, "step": 9084 }, { "epoch": 72.68, "grad_norm": 23.782495498657227, "learning_rate": 1.5217777777777777e-05, "loss": 0.854, "step": 9085 }, { "epoch": 72.688, "grad_norm": 21.967641830444336, "learning_rate": 1.5213333333333336e-05, "loss": 0.6888, "step": 9086 }, { "epoch": 72.696, "grad_norm": 18.367141723632812, "learning_rate": 1.5208888888888889e-05, "loss": 0.858, "step": 9087 }, { "epoch": 72.704, "grad_norm": 33.91196060180664, "learning_rate": 1.5204444444444445e-05, "loss": 0.9077, "step": 9088 }, { "epoch": 72.712, "grad_norm": 
22.399093627929688, "learning_rate": 1.52e-05, "loss": 0.79, "step": 9089 }, { "epoch": 72.72, "grad_norm": 21.991439819335938, "learning_rate": 1.5195555555555557e-05, "loss": 1.051, "step": 9090 }, { "epoch": 72.728, "grad_norm": 67.23423767089844, "learning_rate": 1.5191111111111112e-05, "loss": 0.9413, "step": 9091 }, { "epoch": 72.736, "grad_norm": 33.14324188232422, "learning_rate": 1.5186666666666668e-05, "loss": 1.1338, "step": 9092 }, { "epoch": 72.744, "grad_norm": 18.741657257080078, "learning_rate": 1.5182222222222223e-05, "loss": 0.8684, "step": 9093 }, { "epoch": 72.752, "grad_norm": 19.0715274810791, "learning_rate": 1.517777777777778e-05, "loss": 0.6862, "step": 9094 }, { "epoch": 72.76, "grad_norm": 19.39577293395996, "learning_rate": 1.5173333333333334e-05, "loss": 1.0812, "step": 9095 }, { "epoch": 72.768, "grad_norm": 45.34283447265625, "learning_rate": 1.5168888888888891e-05, "loss": 0.8305, "step": 9096 }, { "epoch": 72.776, "grad_norm": 35.99919128417969, "learning_rate": 1.5164444444444446e-05, "loss": 0.6196, "step": 9097 }, { "epoch": 72.784, "grad_norm": 54.118675231933594, "learning_rate": 1.5160000000000002e-05, "loss": 1.0101, "step": 9098 }, { "epoch": 72.792, "grad_norm": 16.284616470336914, "learning_rate": 1.5155555555555555e-05, "loss": 0.5834, "step": 9099 }, { "epoch": 72.8, "grad_norm": 28.809059143066406, "learning_rate": 1.5151111111111114e-05, "loss": 0.9135, "step": 9100 }, { "epoch": 72.808, "grad_norm": 28.054668426513672, "learning_rate": 1.5146666666666667e-05, "loss": 0.8538, "step": 9101 }, { "epoch": 72.816, "grad_norm": 38.82904815673828, "learning_rate": 1.5142222222222225e-05, "loss": 0.9983, "step": 9102 }, { "epoch": 72.824, "grad_norm": 19.555931091308594, "learning_rate": 1.5137777777777778e-05, "loss": 0.8344, "step": 9103 }, { "epoch": 72.832, "grad_norm": 40.50398254394531, "learning_rate": 1.5133333333333333e-05, "loss": 0.9539, "step": 9104 }, { "epoch": 72.84, "grad_norm": 18.741071701049805, 
"learning_rate": 1.512888888888889e-05, "loss": 0.656, "step": 9105 }, { "epoch": 72.848, "grad_norm": 16.941892623901367, "learning_rate": 1.5124444444444444e-05, "loss": 0.8926, "step": 9106 }, { "epoch": 72.856, "grad_norm": 51.97566604614258, "learning_rate": 1.5120000000000001e-05, "loss": 1.6769, "step": 9107 }, { "epoch": 72.864, "grad_norm": 41.21043014526367, "learning_rate": 1.5115555555555556e-05, "loss": 0.7866, "step": 9108 }, { "epoch": 72.872, "grad_norm": 30.40704917907715, "learning_rate": 1.5111111111111112e-05, "loss": 1.1686, "step": 9109 }, { "epoch": 72.88, "grad_norm": 25.97248649597168, "learning_rate": 1.5106666666666665e-05, "loss": 0.98, "step": 9110 }, { "epoch": 72.888, "grad_norm": 272.389404296875, "learning_rate": 1.5102222222222224e-05, "loss": 1.1307, "step": 9111 }, { "epoch": 72.896, "grad_norm": 20.4150333404541, "learning_rate": 1.5097777777777777e-05, "loss": 1.333, "step": 9112 }, { "epoch": 72.904, "grad_norm": 38.57637023925781, "learning_rate": 1.5093333333333335e-05, "loss": 0.7453, "step": 9113 }, { "epoch": 72.912, "grad_norm": 18.521652221679688, "learning_rate": 1.5088888888888888e-05, "loss": 1.0885, "step": 9114 }, { "epoch": 72.92, "grad_norm": 24.977676391601562, "learning_rate": 1.5084444444444445e-05, "loss": 1.3511, "step": 9115 }, { "epoch": 72.928, "grad_norm": 35.49876403808594, "learning_rate": 1.508e-05, "loss": 0.854, "step": 9116 }, { "epoch": 72.936, "grad_norm": 15.786067008972168, "learning_rate": 1.5075555555555556e-05, "loss": 0.7765, "step": 9117 }, { "epoch": 72.944, "grad_norm": 56.4359245300293, "learning_rate": 1.5071111111111111e-05, "loss": 1.0093, "step": 9118 }, { "epoch": 72.952, "grad_norm": 21.881059646606445, "learning_rate": 1.5066666666666668e-05, "loss": 1.1831, "step": 9119 }, { "epoch": 72.96, "grad_norm": 25.746906280517578, "learning_rate": 1.5062222222222222e-05, "loss": 0.726, "step": 9120 }, { "epoch": 72.968, "grad_norm": 16.511159896850586, "learning_rate": 
1.5057777777777779e-05, "loss": 0.5717, "step": 9121 }, { "epoch": 72.976, "grad_norm": 20.66166877746582, "learning_rate": 1.5053333333333334e-05, "loss": 0.5112, "step": 9122 }, { "epoch": 72.984, "grad_norm": 19.51554298400879, "learning_rate": 1.504888888888889e-05, "loss": 1.0793, "step": 9123 }, { "epoch": 72.992, "grad_norm": 22.063257217407227, "learning_rate": 1.5044444444444445e-05, "loss": 0.8557, "step": 9124 }, { "epoch": 73.0, "grad_norm": 45.00876998901367, "learning_rate": 1.5040000000000002e-05, "loss": 2.2146, "step": 9125 }, { "epoch": 73.0, "eval_loss": 0.9616978764533997, "eval_map": 0.4543, "eval_map_50": 0.7966, "eval_map_75": 0.461, "eval_map_Coverall": 0.6525, "eval_map_Face_Shield": 0.5859, "eval_map_Gloves": 0.3545, "eval_map_Goggles": 0.2584, "eval_map_Mask": 0.4201, "eval_map_large": 0.6455, "eval_map_medium": 0.3542, "eval_map_small": 0.3851, "eval_mar_1": 0.3397, "eval_mar_10": 0.5885, "eval_mar_100": 0.5958, "eval_mar_100_Coverall": 0.7356, "eval_mar_100_Face_Shield": 0.7412, "eval_mar_100_Gloves": 0.4803, "eval_mar_100_Goggles": 0.5125, "eval_mar_100_Mask": 0.5096, "eval_mar_large": 0.7148, "eval_mar_medium": 0.4955, "eval_mar_small": 0.4793, "eval_runtime": 0.935, "eval_samples_per_second": 31.016, "eval_steps_per_second": 2.139, "step": 9125 }, { "epoch": 73.008, "grad_norm": 33.666526794433594, "learning_rate": 1.5035555555555555e-05, "loss": 0.5901, "step": 9126 }, { "epoch": 73.016, "grad_norm": 21.435848236083984, "learning_rate": 1.5031111111111113e-05, "loss": 1.2564, "step": 9127 }, { "epoch": 73.024, "grad_norm": 33.69278335571289, "learning_rate": 1.5026666666666666e-05, "loss": 1.0227, "step": 9128 }, { "epoch": 73.032, "grad_norm": 23.040681838989258, "learning_rate": 1.5022222222222224e-05, "loss": 0.7344, "step": 9129 }, { "epoch": 73.04, "grad_norm": 25.473373413085938, "learning_rate": 1.5017777777777778e-05, "loss": 0.9205, "step": 9130 }, { "epoch": 73.048, "grad_norm": 17.6494083404541, "learning_rate": 
1.5013333333333334e-05, "loss": 0.6763, "step": 9131 }, { "epoch": 73.056, "grad_norm": 14.134173393249512, "learning_rate": 1.5008888888888889e-05, "loss": 0.8782, "step": 9132 }, { "epoch": 73.064, "grad_norm": 161.94747924804688, "learning_rate": 1.5004444444444446e-05, "loss": 2.7842, "step": 9133 }, { "epoch": 73.072, "grad_norm": 18.50273323059082, "learning_rate": 1.5e-05, "loss": 0.788, "step": 9134 }, { "epoch": 73.08, "grad_norm": 23.109840393066406, "learning_rate": 1.4995555555555557e-05, "loss": 0.8564, "step": 9135 }, { "epoch": 73.088, "grad_norm": 28.518198013305664, "learning_rate": 1.4991111111111112e-05, "loss": 0.8629, "step": 9136 }, { "epoch": 73.096, "grad_norm": 45.85733413696289, "learning_rate": 1.4986666666666668e-05, "loss": 0.6246, "step": 9137 }, { "epoch": 73.104, "grad_norm": 24.416976928710938, "learning_rate": 1.4982222222222223e-05, "loss": 0.89, "step": 9138 }, { "epoch": 73.112, "grad_norm": 23.33110237121582, "learning_rate": 1.497777777777778e-05, "loss": 0.7696, "step": 9139 }, { "epoch": 73.12, "grad_norm": 23.044321060180664, "learning_rate": 1.4973333333333333e-05, "loss": 1.0948, "step": 9140 }, { "epoch": 73.128, "grad_norm": 24.567934036254883, "learning_rate": 1.4968888888888891e-05, "loss": 1.1176, "step": 9141 }, { "epoch": 73.136, "grad_norm": 22.466825485229492, "learning_rate": 1.4964444444444444e-05, "loss": 0.9381, "step": 9142 }, { "epoch": 73.144, "grad_norm": 28.971023559570312, "learning_rate": 1.4960000000000002e-05, "loss": 0.8133, "step": 9143 }, { "epoch": 73.152, "grad_norm": 35.56562423706055, "learning_rate": 1.4955555555555556e-05, "loss": 0.8098, "step": 9144 }, { "epoch": 73.16, "grad_norm": 24.38973045349121, "learning_rate": 1.4951111111111112e-05, "loss": 0.8765, "step": 9145 }, { "epoch": 73.168, "grad_norm": 51.29257583618164, "learning_rate": 1.4946666666666667e-05, "loss": 1.1745, "step": 9146 }, { "epoch": 73.176, "grad_norm": 12.924586296081543, "learning_rate": 1.4942222222222224e-05, 
"loss": 0.8096, "step": 9147 }, { "epoch": 73.184, "grad_norm": 24.90133285522461, "learning_rate": 1.4937777777777778e-05, "loss": 0.999, "step": 9148 }, { "epoch": 73.192, "grad_norm": 19.31430435180664, "learning_rate": 1.4933333333333335e-05, "loss": 1.0644, "step": 9149 }, { "epoch": 73.2, "grad_norm": 41.031166076660156, "learning_rate": 1.492888888888889e-05, "loss": 1.4891, "step": 9150 }, { "epoch": 73.208, "grad_norm": 30.946102142333984, "learning_rate": 1.4924444444444446e-05, "loss": 0.8853, "step": 9151 }, { "epoch": 73.216, "grad_norm": 28.035842895507812, "learning_rate": 1.4920000000000001e-05, "loss": 0.7009, "step": 9152 }, { "epoch": 73.224, "grad_norm": 30.228946685791016, "learning_rate": 1.4915555555555558e-05, "loss": 1.7394, "step": 9153 }, { "epoch": 73.232, "grad_norm": 39.72758865356445, "learning_rate": 1.4911111111111113e-05, "loss": 0.8703, "step": 9154 }, { "epoch": 73.24, "grad_norm": 21.193626403808594, "learning_rate": 1.4906666666666666e-05, "loss": 0.5981, "step": 9155 }, { "epoch": 73.248, "grad_norm": 23.82907485961914, "learning_rate": 1.4902222222222222e-05, "loss": 0.8147, "step": 9156 }, { "epoch": 73.256, "grad_norm": 31.272232055664062, "learning_rate": 1.4897777777777777e-05, "loss": 1.2199, "step": 9157 }, { "epoch": 73.264, "grad_norm": 27.429107666015625, "learning_rate": 1.4893333333333334e-05, "loss": 0.8508, "step": 9158 }, { "epoch": 73.272, "grad_norm": 17.880809783935547, "learning_rate": 1.4888888888888888e-05, "loss": 0.59, "step": 9159 }, { "epoch": 73.28, "grad_norm": 28.67238426208496, "learning_rate": 1.4884444444444445e-05, "loss": 1.0295, "step": 9160 }, { "epoch": 73.288, "grad_norm": 19.74173927307129, "learning_rate": 1.488e-05, "loss": 0.9124, "step": 9161 }, { "epoch": 73.296, "grad_norm": 31.673681259155273, "learning_rate": 1.4875555555555556e-05, "loss": 0.9035, "step": 9162 }, { "epoch": 73.304, "grad_norm": 26.229154586791992, "learning_rate": 1.4871111111111111e-05, "loss": 0.7205, "step": 
9163 }, { "epoch": 73.312, "grad_norm": 27.107860565185547, "learning_rate": 1.4866666666666668e-05, "loss": 0.9216, "step": 9164 }, { "epoch": 73.32, "grad_norm": 25.62652587890625, "learning_rate": 1.4862222222222223e-05, "loss": 0.9626, "step": 9165 }, { "epoch": 73.328, "grad_norm": 22.173513412475586, "learning_rate": 1.4857777777777779e-05, "loss": 0.563, "step": 9166 }, { "epoch": 73.336, "grad_norm": 14.633695602416992, "learning_rate": 1.4853333333333332e-05, "loss": 0.7328, "step": 9167 }, { "epoch": 73.344, "grad_norm": 14.7404203414917, "learning_rate": 1.484888888888889e-05, "loss": 0.5781, "step": 9168 }, { "epoch": 73.352, "grad_norm": 19.31357192993164, "learning_rate": 1.4844444444444444e-05, "loss": 0.7991, "step": 9169 }, { "epoch": 73.36, "grad_norm": 24.427335739135742, "learning_rate": 1.4840000000000002e-05, "loss": 0.863, "step": 9170 }, { "epoch": 73.368, "grad_norm": 17.134063720703125, "learning_rate": 1.4835555555555555e-05, "loss": 0.7319, "step": 9171 }, { "epoch": 73.376, "grad_norm": 23.42831802368164, "learning_rate": 1.4831111111111112e-05, "loss": 0.8437, "step": 9172 }, { "epoch": 73.384, "grad_norm": 15.539149284362793, "learning_rate": 1.4826666666666666e-05, "loss": 1.4363, "step": 9173 }, { "epoch": 73.392, "grad_norm": 185.57264709472656, "learning_rate": 1.4822222222222223e-05, "loss": 0.7773, "step": 9174 }, { "epoch": 73.4, "grad_norm": 23.892433166503906, "learning_rate": 1.4817777777777778e-05, "loss": 0.7675, "step": 9175 }, { "epoch": 73.408, "grad_norm": 37.41897964477539, "learning_rate": 1.4813333333333334e-05, "loss": 1.0264, "step": 9176 }, { "epoch": 73.416, "grad_norm": 24.779830932617188, "learning_rate": 1.480888888888889e-05, "loss": 0.9268, "step": 9177 }, { "epoch": 73.424, "grad_norm": 16.89499282836914, "learning_rate": 1.4804444444444446e-05, "loss": 0.9313, "step": 9178 }, { "epoch": 73.432, "grad_norm": 17.672924041748047, "learning_rate": 1.48e-05, "loss": 1.4027, "step": 9179 }, { "epoch": 73.44, 
"grad_norm": 30.78389549255371, "learning_rate": 1.4795555555555557e-05, "loss": 0.8219, "step": 9180 }, { "epoch": 73.448, "grad_norm": 25.491682052612305, "learning_rate": 1.4791111111111112e-05, "loss": 1.267, "step": 9181 }, { "epoch": 73.456, "grad_norm": 24.52678871154785, "learning_rate": 1.4786666666666669e-05, "loss": 0.8927, "step": 9182 }, { "epoch": 73.464, "grad_norm": 33.9382438659668, "learning_rate": 1.4782222222222222e-05, "loss": 2.0426, "step": 9183 }, { "epoch": 73.472, "grad_norm": 19.3314151763916, "learning_rate": 1.477777777777778e-05, "loss": 0.9199, "step": 9184 }, { "epoch": 73.48, "grad_norm": 20.596168518066406, "learning_rate": 1.4773333333333333e-05, "loss": 0.7001, "step": 9185 }, { "epoch": 73.488, "grad_norm": 24.499853134155273, "learning_rate": 1.476888888888889e-05, "loss": 0.7445, "step": 9186 }, { "epoch": 73.496, "grad_norm": 14.138348579406738, "learning_rate": 1.4764444444444444e-05, "loss": 0.7171, "step": 9187 }, { "epoch": 73.504, "grad_norm": 24.77707290649414, "learning_rate": 1.4760000000000001e-05, "loss": 0.6263, "step": 9188 }, { "epoch": 73.512, "grad_norm": 29.676193237304688, "learning_rate": 1.4755555555555556e-05, "loss": 1.1899, "step": 9189 }, { "epoch": 73.52, "grad_norm": 19.036218643188477, "learning_rate": 1.4751111111111112e-05, "loss": 1.3743, "step": 9190 }, { "epoch": 73.528, "grad_norm": 52.331085205078125, "learning_rate": 1.4746666666666667e-05, "loss": 0.5746, "step": 9191 }, { "epoch": 73.536, "grad_norm": 29.931941986083984, "learning_rate": 1.4742222222222224e-05, "loss": 1.4173, "step": 9192 }, { "epoch": 73.544, "grad_norm": 38.94394302368164, "learning_rate": 1.4737777777777779e-05, "loss": 0.8785, "step": 9193 }, { "epoch": 73.552, "grad_norm": 17.701732635498047, "learning_rate": 1.4733333333333335e-05, "loss": 0.9033, "step": 9194 }, { "epoch": 73.56, "grad_norm": 18.510509490966797, "learning_rate": 1.472888888888889e-05, "loss": 0.6984, "step": 9195 }, { "epoch": 73.568, "grad_norm": 
26.7989444732666, "learning_rate": 1.4724444444444447e-05, "loss": 1.0619, "step": 9196 }, { "epoch": 73.576, "grad_norm": 17.375263214111328, "learning_rate": 1.472e-05, "loss": 1.194, "step": 9197 }, { "epoch": 73.584, "grad_norm": 50.25949478149414, "learning_rate": 1.4715555555555558e-05, "loss": 0.7156, "step": 9198 }, { "epoch": 73.592, "grad_norm": 22.71778106689453, "learning_rate": 1.4711111111111111e-05, "loss": 1.0645, "step": 9199 }, { "epoch": 73.6, "grad_norm": 17.05732536315918, "learning_rate": 1.470666666666667e-05, "loss": 0.8245, "step": 9200 }, { "epoch": 73.608, "grad_norm": 14.79897689819336, "learning_rate": 1.4702222222222222e-05, "loss": 1.6391, "step": 9201 }, { "epoch": 73.616, "grad_norm": 26.738229751586914, "learning_rate": 1.4697777777777779e-05, "loss": 0.8051, "step": 9202 }, { "epoch": 73.624, "grad_norm": 17.227340698242188, "learning_rate": 1.4693333333333334e-05, "loss": 0.8244, "step": 9203 }, { "epoch": 73.632, "grad_norm": 21.239295959472656, "learning_rate": 1.468888888888889e-05, "loss": 1.0723, "step": 9204 }, { "epoch": 73.64, "grad_norm": 42.839378356933594, "learning_rate": 1.4684444444444445e-05, "loss": 0.9117, "step": 9205 }, { "epoch": 73.648, "grad_norm": 19.935527801513672, "learning_rate": 1.4680000000000002e-05, "loss": 1.0702, "step": 9206 }, { "epoch": 73.656, "grad_norm": 29.199058532714844, "learning_rate": 1.4675555555555557e-05, "loss": 0.8088, "step": 9207 }, { "epoch": 73.664, "grad_norm": 72.03186798095703, "learning_rate": 1.467111111111111e-05, "loss": 1.1383, "step": 9208 }, { "epoch": 73.672, "grad_norm": 49.30316162109375, "learning_rate": 1.4666666666666668e-05, "loss": 1.1045, "step": 9209 }, { "epoch": 73.68, "grad_norm": 22.24881362915039, "learning_rate": 1.4662222222222221e-05, "loss": 0.9196, "step": 9210 }, { "epoch": 73.688, "grad_norm": 38.212345123291016, "learning_rate": 1.465777777777778e-05, "loss": 0.8487, "step": 9211 }, { "epoch": 73.696, "grad_norm": 18.49112319946289, 
"learning_rate": 1.4653333333333333e-05, "loss": 1.09, "step": 9212 }, { "epoch": 73.704, "grad_norm": 19.779376983642578, "learning_rate": 1.4648888888888889e-05, "loss": 0.8351, "step": 9213 }, { "epoch": 73.712, "grad_norm": 23.543163299560547, "learning_rate": 1.4644444444444444e-05, "loss": 0.8213, "step": 9214 }, { "epoch": 73.72, "grad_norm": 17.48585319519043, "learning_rate": 1.464e-05, "loss": 0.6602, "step": 9215 }, { "epoch": 73.728, "grad_norm": 48.71371841430664, "learning_rate": 1.4635555555555555e-05, "loss": 0.6319, "step": 9216 }, { "epoch": 73.736, "grad_norm": 38.29029846191406, "learning_rate": 1.4631111111111112e-05, "loss": 0.9277, "step": 9217 }, { "epoch": 73.744, "grad_norm": 23.48838233947754, "learning_rate": 1.4626666666666667e-05, "loss": 0.7518, "step": 9218 }, { "epoch": 73.752, "grad_norm": 39.783382415771484, "learning_rate": 1.4622222222222223e-05, "loss": 0.6006, "step": 9219 }, { "epoch": 73.76, "grad_norm": 33.00890350341797, "learning_rate": 1.4617777777777778e-05, "loss": 0.8622, "step": 9220 }, { "epoch": 73.768, "grad_norm": 23.063940048217773, "learning_rate": 1.4613333333333335e-05, "loss": 0.8729, "step": 9221 }, { "epoch": 73.776, "grad_norm": 11.502851486206055, "learning_rate": 1.460888888888889e-05, "loss": 0.8189, "step": 9222 }, { "epoch": 73.784, "grad_norm": 19.15219497680664, "learning_rate": 1.4604444444444446e-05, "loss": 0.5637, "step": 9223 }, { "epoch": 73.792, "grad_norm": 16.155330657958984, "learning_rate": 1.4599999999999999e-05, "loss": 1.0076, "step": 9224 }, { "epoch": 73.8, "grad_norm": 31.831159591674805, "learning_rate": 1.4595555555555557e-05, "loss": 0.6505, "step": 9225 }, { "epoch": 73.808, "grad_norm": 24.86466407775879, "learning_rate": 1.459111111111111e-05, "loss": 0.5592, "step": 9226 }, { "epoch": 73.816, "grad_norm": 15.595321655273438, "learning_rate": 1.4586666666666669e-05, "loss": 1.1748, "step": 9227 }, { "epoch": 73.824, "grad_norm": 60.84123992919922, "learning_rate": 
1.4582222222222222e-05, "loss": 0.9628, "step": 9228 }, { "epoch": 73.832, "grad_norm": 26.17855453491211, "learning_rate": 1.4577777777777778e-05, "loss": 0.9076, "step": 9229 }, { "epoch": 73.84, "grad_norm": 16.594682693481445, "learning_rate": 1.4573333333333333e-05, "loss": 0.8974, "step": 9230 }, { "epoch": 73.848, "grad_norm": 25.696640014648438, "learning_rate": 1.456888888888889e-05, "loss": 0.9224, "step": 9231 }, { "epoch": 73.856, "grad_norm": 22.603944778442383, "learning_rate": 1.4564444444444445e-05, "loss": 0.6642, "step": 9232 }, { "epoch": 73.864, "grad_norm": 34.48024368286133, "learning_rate": 1.4560000000000001e-05, "loss": 0.4973, "step": 9233 }, { "epoch": 73.872, "grad_norm": 50.92998504638672, "learning_rate": 1.4555555555555556e-05, "loss": 0.6944, "step": 9234 }, { "epoch": 73.88, "grad_norm": 19.27631950378418, "learning_rate": 1.4551111111111113e-05, "loss": 0.7919, "step": 9235 }, { "epoch": 73.888, "grad_norm": 26.99009132385254, "learning_rate": 1.4546666666666667e-05, "loss": 0.4818, "step": 9236 }, { "epoch": 73.896, "grad_norm": 17.941869735717773, "learning_rate": 1.4542222222222224e-05, "loss": 0.7518, "step": 9237 }, { "epoch": 73.904, "grad_norm": 31.168922424316406, "learning_rate": 1.4537777777777777e-05, "loss": 0.9272, "step": 9238 }, { "epoch": 73.912, "grad_norm": 25.00088119506836, "learning_rate": 1.4533333333333335e-05, "loss": 0.85, "step": 9239 }, { "epoch": 73.92, "grad_norm": 22.419940948486328, "learning_rate": 1.4528888888888889e-05, "loss": 0.772, "step": 9240 }, { "epoch": 73.928, "grad_norm": 31.562442779541016, "learning_rate": 1.4524444444444447e-05, "loss": 1.0213, "step": 9241 }, { "epoch": 73.936, "grad_norm": 16.731821060180664, "learning_rate": 1.452e-05, "loss": 0.5944, "step": 9242 }, { "epoch": 73.944, "grad_norm": 34.0837287902832, "learning_rate": 1.4515555555555556e-05, "loss": 1.6719, "step": 9243 }, { "epoch": 73.952, "grad_norm": 36.79066848754883, "learning_rate": 1.4511111111111111e-05, 
"loss": 1.0577, "step": 9244 }, { "epoch": 73.96, "grad_norm": 21.76201057434082, "learning_rate": 1.4506666666666668e-05, "loss": 1.043, "step": 9245 }, { "epoch": 73.968, "grad_norm": 37.01383590698242, "learning_rate": 1.4502222222222223e-05, "loss": 1.0066, "step": 9246 }, { "epoch": 73.976, "grad_norm": 62.57576370239258, "learning_rate": 1.449777777777778e-05, "loss": 1.6629, "step": 9247 }, { "epoch": 73.984, "grad_norm": 28.007396697998047, "learning_rate": 1.4493333333333334e-05, "loss": 1.1337, "step": 9248 }, { "epoch": 73.992, "grad_norm": 37.173763275146484, "learning_rate": 1.448888888888889e-05, "loss": 0.5518, "step": 9249 }, { "epoch": 74.0, "grad_norm": 27.937423706054688, "learning_rate": 1.4484444444444445e-05, "loss": 1.099, "step": 9250 }, { "epoch": 74.0, "eval_loss": 0.9532605409622192, "eval_map": 0.4624, "eval_map_50": 0.8054, "eval_map_75": 0.4833, "eval_map_Coverall": 0.6628, "eval_map_Face_Shield": 0.568, "eval_map_Gloves": 0.3699, "eval_map_Goggles": 0.2599, "eval_map_Mask": 0.4513, "eval_map_large": 0.6619, "eval_map_medium": 0.352, "eval_map_small": 0.3518, "eval_mar_1": 0.3471, "eval_mar_10": 0.5922, "eval_mar_100": 0.6075, "eval_mar_100_Coverall": 0.7556, "eval_mar_100_Face_Shield": 0.7235, "eval_mar_100_Gloves": 0.482, "eval_mar_100_Goggles": 0.5281, "eval_mar_100_Mask": 0.5481, "eval_mar_large": 0.7642, "eval_mar_medium": 0.4977, "eval_mar_small": 0.4665, "eval_runtime": 0.929, "eval_samples_per_second": 31.217, "eval_steps_per_second": 2.153, "step": 9250 }, { "epoch": 74.008, "grad_norm": 124.80071258544922, "learning_rate": 1.4480000000000002e-05, "loss": 0.912, "step": 9251 }, { "epoch": 74.016, "grad_norm": 220.7178955078125, "learning_rate": 1.4475555555555557e-05, "loss": 0.7168, "step": 9252 }, { "epoch": 74.024, "grad_norm": 25.04460906982422, "learning_rate": 1.4471111111111113e-05, "loss": 0.7997, "step": 9253 }, { "epoch": 74.032, "grad_norm": 19.441226959228516, "learning_rate": 1.4466666666666667e-05, "loss": 
0.8227, "step": 9254 }, { "epoch": 74.04, "grad_norm": 17.921720504760742, "learning_rate": 1.4462222222222225e-05, "loss": 0.7042, "step": 9255 }, { "epoch": 74.048, "grad_norm": 22.248306274414062, "learning_rate": 1.4457777777777778e-05, "loss": 0.5701, "step": 9256 }, { "epoch": 74.056, "grad_norm": 28.119857788085938, "learning_rate": 1.4453333333333336e-05, "loss": 1.3772, "step": 9257 }, { "epoch": 74.064, "grad_norm": 567.3524780273438, "learning_rate": 1.444888888888889e-05, "loss": 0.8884, "step": 9258 }, { "epoch": 74.072, "grad_norm": 31.06269073486328, "learning_rate": 1.4444444444444444e-05, "loss": 0.8606, "step": 9259 }, { "epoch": 74.08, "grad_norm": 19.510530471801758, "learning_rate": 1.444e-05, "loss": 1.2314, "step": 9260 }, { "epoch": 74.088, "grad_norm": 36.37300491333008, "learning_rate": 1.4435555555555556e-05, "loss": 0.7385, "step": 9261 }, { "epoch": 74.096, "grad_norm": 72.19977569580078, "learning_rate": 1.4431111111111112e-05, "loss": 0.5315, "step": 9262 }, { "epoch": 74.104, "grad_norm": 19.878515243530273, "learning_rate": 1.4426666666666667e-05, "loss": 0.8976, "step": 9263 }, { "epoch": 74.112, "grad_norm": 22.858572006225586, "learning_rate": 1.4422222222222223e-05, "loss": 0.7041, "step": 9264 }, { "epoch": 74.12, "grad_norm": 14.185462951660156, "learning_rate": 1.4417777777777777e-05, "loss": 0.7434, "step": 9265 }, { "epoch": 74.128, "grad_norm": 43.537296295166016, "learning_rate": 1.4413333333333335e-05, "loss": 0.8152, "step": 9266 }, { "epoch": 74.136, "grad_norm": 36.74695587158203, "learning_rate": 1.4408888888888888e-05, "loss": 0.916, "step": 9267 }, { "epoch": 74.144, "grad_norm": 14.267940521240234, "learning_rate": 1.4404444444444446e-05, "loss": 0.8101, "step": 9268 }, { "epoch": 74.152, "grad_norm": 34.279510498046875, "learning_rate": 1.44e-05, "loss": 1.7616, "step": 9269 }, { "epoch": 74.16, "grad_norm": 31.322498321533203, "learning_rate": 1.4395555555555556e-05, "loss": 0.8204, "step": 9270 }, { "epoch": 
74.168, "grad_norm": 21.5903377532959, "learning_rate": 1.439111111111111e-05, "loss": 1.7329, "step": 9271 }, { "epoch": 74.176, "grad_norm": 18.86119842529297, "learning_rate": 1.4386666666666667e-05, "loss": 1.2166, "step": 9272 }, { "epoch": 74.184, "grad_norm": 24.19829559326172, "learning_rate": 1.4382222222222222e-05, "loss": 0.7604, "step": 9273 }, { "epoch": 74.192, "grad_norm": 40.7138557434082, "learning_rate": 1.4377777777777779e-05, "loss": 0.5939, "step": 9274 }, { "epoch": 74.2, "grad_norm": 28.210939407348633, "learning_rate": 1.4373333333333334e-05, "loss": 0.6576, "step": 9275 }, { "epoch": 74.208, "grad_norm": 77.88947296142578, "learning_rate": 1.436888888888889e-05, "loss": 0.6319, "step": 9276 }, { "epoch": 74.216, "grad_norm": 28.099075317382812, "learning_rate": 1.4364444444444445e-05, "loss": 0.6819, "step": 9277 }, { "epoch": 74.224, "grad_norm": 21.991363525390625, "learning_rate": 1.4360000000000001e-05, "loss": 0.7527, "step": 9278 }, { "epoch": 74.232, "grad_norm": 19.976505279541016, "learning_rate": 1.4355555555555556e-05, "loss": 0.9729, "step": 9279 }, { "epoch": 74.24, "grad_norm": 24.33202362060547, "learning_rate": 1.4351111111111113e-05, "loss": 0.6473, "step": 9280 }, { "epoch": 74.248, "grad_norm": 23.256853103637695, "learning_rate": 1.4346666666666666e-05, "loss": 1.8001, "step": 9281 }, { "epoch": 74.256, "grad_norm": 13.138835906982422, "learning_rate": 1.4342222222222224e-05, "loss": 1.1038, "step": 9282 }, { "epoch": 74.264, "grad_norm": 21.68073272705078, "learning_rate": 1.4337777777777777e-05, "loss": 0.8236, "step": 9283 }, { "epoch": 74.272, "grad_norm": 19.344499588012695, "learning_rate": 1.4333333333333334e-05, "loss": 0.7, "step": 9284 }, { "epoch": 74.28, "grad_norm": 18.306655883789062, "learning_rate": 1.4328888888888889e-05, "loss": 0.9242, "step": 9285 }, { "epoch": 74.288, "grad_norm": 35.02982711791992, "learning_rate": 1.4324444444444445e-05, "loss": 0.725, "step": 9286 }, { "epoch": 74.296, 
"grad_norm": 21.01913833618164, "learning_rate": 1.432e-05, "loss": 1.1572, "step": 9287 }, { "epoch": 74.304, "grad_norm": 103.90083312988281, "learning_rate": 1.4315555555555557e-05, "loss": 0.8576, "step": 9288 }, { "epoch": 74.312, "grad_norm": 21.754545211791992, "learning_rate": 1.4311111111111111e-05, "loss": 0.9623, "step": 9289 }, { "epoch": 74.32, "grad_norm": 55.86394500732422, "learning_rate": 1.4306666666666668e-05, "loss": 1.1669, "step": 9290 }, { "epoch": 74.328, "grad_norm": 78.55057525634766, "learning_rate": 1.4302222222222223e-05, "loss": 1.6435, "step": 9291 }, { "epoch": 74.336, "grad_norm": 56.373409271240234, "learning_rate": 1.429777777777778e-05, "loss": 0.8256, "step": 9292 }, { "epoch": 74.344, "grad_norm": 13.474743843078613, "learning_rate": 1.4293333333333334e-05, "loss": 0.6134, "step": 9293 }, { "epoch": 74.352, "grad_norm": 18.711973190307617, "learning_rate": 1.428888888888889e-05, "loss": 0.9601, "step": 9294 }, { "epoch": 74.36, "grad_norm": 17.189411163330078, "learning_rate": 1.4284444444444444e-05, "loss": 0.8328, "step": 9295 }, { "epoch": 74.368, "grad_norm": 27.841588973999023, "learning_rate": 1.4280000000000002e-05, "loss": 0.925, "step": 9296 }, { "epoch": 74.376, "grad_norm": 27.39866065979004, "learning_rate": 1.4275555555555555e-05, "loss": 0.6183, "step": 9297 }, { "epoch": 74.384, "grad_norm": 16.0148983001709, "learning_rate": 1.4271111111111114e-05, "loss": 1.0047, "step": 9298 }, { "epoch": 74.392, "grad_norm": 17.264604568481445, "learning_rate": 1.4266666666666667e-05, "loss": 0.7901, "step": 9299 }, { "epoch": 74.4, "grad_norm": 92.96815490722656, "learning_rate": 1.4262222222222223e-05, "loss": 0.8162, "step": 9300 }, { "epoch": 74.408, "grad_norm": 45.73798751831055, "learning_rate": 1.4257777777777778e-05, "loss": 1.026, "step": 9301 }, { "epoch": 74.416, "grad_norm": 16.94510269165039, "learning_rate": 1.4253333333333335e-05, "loss": 0.9681, "step": 9302 }, { "epoch": 74.424, "grad_norm": 
21.059398651123047, "learning_rate": 1.424888888888889e-05, "loss": 1.2793, "step": 9303 }, { "epoch": 74.432, "grad_norm": 23.861854553222656, "learning_rate": 1.4244444444444446e-05, "loss": 0.5222, "step": 9304 }, { "epoch": 74.44, "grad_norm": 34.89462661743164, "learning_rate": 1.4240000000000001e-05, "loss": 0.5716, "step": 9305 }, { "epoch": 74.448, "grad_norm": 47.40544128417969, "learning_rate": 1.4235555555555557e-05, "loss": 0.7251, "step": 9306 }, { "epoch": 74.456, "grad_norm": 26.095531463623047, "learning_rate": 1.4231111111111112e-05, "loss": 0.6595, "step": 9307 }, { "epoch": 74.464, "grad_norm": 24.88098907470703, "learning_rate": 1.4226666666666669e-05, "loss": 0.9595, "step": 9308 }, { "epoch": 74.472, "grad_norm": 16.420429229736328, "learning_rate": 1.4222222222222224e-05, "loss": 0.8343, "step": 9309 }, { "epoch": 74.48, "grad_norm": 16.410419464111328, "learning_rate": 1.4217777777777777e-05, "loss": 0.878, "step": 9310 }, { "epoch": 74.488, "grad_norm": 22.419137954711914, "learning_rate": 1.4213333333333333e-05, "loss": 0.627, "step": 9311 }, { "epoch": 74.496, "grad_norm": 22.636085510253906, "learning_rate": 1.4208888888888888e-05, "loss": 1.2729, "step": 9312 }, { "epoch": 74.504, "grad_norm": 22.022933959960938, "learning_rate": 1.4204444444444445e-05, "loss": 0.8533, "step": 9313 }, { "epoch": 74.512, "grad_norm": 22.151721954345703, "learning_rate": 1.42e-05, "loss": 0.8234, "step": 9314 }, { "epoch": 74.52, "grad_norm": 41.120426177978516, "learning_rate": 1.4195555555555556e-05, "loss": 0.9343, "step": 9315 }, { "epoch": 74.528, "grad_norm": 25.27794647216797, "learning_rate": 1.4191111111111111e-05, "loss": 0.6964, "step": 9316 }, { "epoch": 74.536, "grad_norm": 19.806169509887695, "learning_rate": 1.4186666666666667e-05, "loss": 0.8973, "step": 9317 }, { "epoch": 74.544, "grad_norm": 26.772279739379883, "learning_rate": 1.4182222222222222e-05, "loss": 1.0198, "step": 9318 }, { "epoch": 74.552, "grad_norm": 35.428932189941406, 
"learning_rate": 1.4177777777777779e-05, "loss": 0.8177, "step": 9319 }, { "epoch": 74.56, "grad_norm": 20.701074600219727, "learning_rate": 1.4173333333333334e-05, "loss": 0.9784, "step": 9320 }, { "epoch": 74.568, "grad_norm": 27.255603790283203, "learning_rate": 1.416888888888889e-05, "loss": 0.5978, "step": 9321 }, { "epoch": 74.576, "grad_norm": 28.002140045166016, "learning_rate": 1.4164444444444443e-05, "loss": 1.0915, "step": 9322 }, { "epoch": 74.584, "grad_norm": 14.446413040161133, "learning_rate": 1.4160000000000002e-05, "loss": 0.8266, "step": 9323 }, { "epoch": 74.592, "grad_norm": 9.69032096862793, "learning_rate": 1.4155555555555555e-05, "loss": 1.2743, "step": 9324 }, { "epoch": 74.6, "grad_norm": 19.68779945373535, "learning_rate": 1.4151111111111113e-05, "loss": 0.7382, "step": 9325 }, { "epoch": 74.608, "grad_norm": 39.470420837402344, "learning_rate": 1.4146666666666666e-05, "loss": 0.5773, "step": 9326 }, { "epoch": 74.616, "grad_norm": 16.038028717041016, "learning_rate": 1.4142222222222223e-05, "loss": 1.0648, "step": 9327 }, { "epoch": 74.624, "grad_norm": 40.32278060913086, "learning_rate": 1.4137777777777778e-05, "loss": 1.436, "step": 9328 }, { "epoch": 74.632, "grad_norm": 25.367326736450195, "learning_rate": 1.4133333333333334e-05, "loss": 0.5851, "step": 9329 }, { "epoch": 74.64, "grad_norm": 69.78304290771484, "learning_rate": 1.4128888888888889e-05, "loss": 1.1487, "step": 9330 }, { "epoch": 74.648, "grad_norm": 31.80868911743164, "learning_rate": 1.4124444444444445e-05, "loss": 0.9128, "step": 9331 }, { "epoch": 74.656, "grad_norm": 20.870853424072266, "learning_rate": 1.412e-05, "loss": 0.6307, "step": 9332 }, { "epoch": 74.664, "grad_norm": 22.4947509765625, "learning_rate": 1.4115555555555557e-05, "loss": 1.0998, "step": 9333 }, { "epoch": 74.672, "grad_norm": 61.19297790527344, "learning_rate": 1.4111111111111112e-05, "loss": 0.956, "step": 9334 }, { "epoch": 74.68, "grad_norm": 24.076751708984375, "learning_rate": 
1.4106666666666668e-05, "loss": 1.203, "step": 9335 }, { "epoch": 74.688, "grad_norm": 13.279186248779297, "learning_rate": 1.4102222222222221e-05, "loss": 1.0924, "step": 9336 }, { "epoch": 74.696, "grad_norm": 28.929372787475586, "learning_rate": 1.409777777777778e-05, "loss": 0.7309, "step": 9337 }, { "epoch": 74.704, "grad_norm": 43.20601272583008, "learning_rate": 1.4093333333333333e-05, "loss": 1.0066, "step": 9338 }, { "epoch": 74.712, "grad_norm": 19.3375186920166, "learning_rate": 1.4088888888888891e-05, "loss": 0.6368, "step": 9339 }, { "epoch": 74.72, "grad_norm": 17.0316219329834, "learning_rate": 1.4084444444444444e-05, "loss": 0.7729, "step": 9340 }, { "epoch": 74.728, "grad_norm": 19.856691360473633, "learning_rate": 1.408e-05, "loss": 1.231, "step": 9341 }, { "epoch": 74.736, "grad_norm": 16.462421417236328, "learning_rate": 1.4075555555555556e-05, "loss": 1.0802, "step": 9342 }, { "epoch": 74.744, "grad_norm": 31.807022094726562, "learning_rate": 1.4071111111111112e-05, "loss": 0.7704, "step": 9343 }, { "epoch": 74.752, "grad_norm": 19.467342376708984, "learning_rate": 1.4066666666666667e-05, "loss": 0.687, "step": 9344 }, { "epoch": 74.76, "grad_norm": 17.59173011779785, "learning_rate": 1.4062222222222223e-05, "loss": 1.5965, "step": 9345 }, { "epoch": 74.768, "grad_norm": 16.88700294494629, "learning_rate": 1.4057777777777778e-05, "loss": 0.6838, "step": 9346 }, { "epoch": 74.776, "grad_norm": 29.922338485717773, "learning_rate": 1.4053333333333335e-05, "loss": 1.283, "step": 9347 }, { "epoch": 74.784, "grad_norm": 20.102767944335938, "learning_rate": 1.404888888888889e-05, "loss": 0.7575, "step": 9348 }, { "epoch": 74.792, "grad_norm": 26.758127212524414, "learning_rate": 1.4044444444444446e-05, "loss": 0.7398, "step": 9349 }, { "epoch": 74.8, "grad_norm": 15.490999221801758, "learning_rate": 1.4040000000000001e-05, "loss": 0.7235, "step": 9350 }, { "epoch": 74.808, "grad_norm": 21.588041305541992, "learning_rate": 1.4035555555555558e-05, 
"loss": 0.8261, "step": 9351 }, { "epoch": 74.816, "grad_norm": 31.08133888244629, "learning_rate": 1.403111111111111e-05, "loss": 1.0263, "step": 9352 }, { "epoch": 74.824, "grad_norm": 18.403308868408203, "learning_rate": 1.4026666666666669e-05, "loss": 1.0988, "step": 9353 }, { "epoch": 74.832, "grad_norm": 25.35797691345215, "learning_rate": 1.4022222222222222e-05, "loss": 1.0629, "step": 9354 }, { "epoch": 74.84, "grad_norm": 30.625003814697266, "learning_rate": 1.401777777777778e-05, "loss": 0.834, "step": 9355 }, { "epoch": 74.848, "grad_norm": 15.43151569366455, "learning_rate": 1.4013333333333334e-05, "loss": 0.9451, "step": 9356 }, { "epoch": 74.856, "grad_norm": 19.164945602416992, "learning_rate": 1.400888888888889e-05, "loss": 0.9329, "step": 9357 }, { "epoch": 74.864, "grad_norm": 37.74068069458008, "learning_rate": 1.4004444444444445e-05, "loss": 2.6663, "step": 9358 }, { "epoch": 74.872, "grad_norm": 21.91874122619629, "learning_rate": 1.4000000000000001e-05, "loss": 0.85, "step": 9359 }, { "epoch": 74.88, "grad_norm": 30.13147735595703, "learning_rate": 1.3995555555555556e-05, "loss": 0.9019, "step": 9360 }, { "epoch": 74.888, "grad_norm": 11.00794506072998, "learning_rate": 1.3991111111111113e-05, "loss": 0.5365, "step": 9361 }, { "epoch": 74.896, "grad_norm": 31.577865600585938, "learning_rate": 1.3986666666666668e-05, "loss": 0.9512, "step": 9362 }, { "epoch": 74.904, "grad_norm": 13.043973922729492, "learning_rate": 1.3982222222222221e-05, "loss": 0.8885, "step": 9363 }, { "epoch": 74.912, "grad_norm": 34.56002426147461, "learning_rate": 1.3977777777777779e-05, "loss": 0.6917, "step": 9364 }, { "epoch": 74.92, "grad_norm": 16.98841667175293, "learning_rate": 1.3973333333333332e-05, "loss": 0.7133, "step": 9365 }, { "epoch": 74.928, "grad_norm": 40.30622863769531, "learning_rate": 1.396888888888889e-05, "loss": 0.7859, "step": 9366 }, { "epoch": 74.936, "grad_norm": 24.333200454711914, "learning_rate": 1.3964444444444444e-05, "loss": 0.7369, 
"step": 9367 }, { "epoch": 74.944, "grad_norm": 27.385841369628906, "learning_rate": 1.396e-05, "loss": 1.1391, "step": 9368 }, { "epoch": 74.952, "grad_norm": 21.59559440612793, "learning_rate": 1.3955555555555555e-05, "loss": 1.1791, "step": 9369 }, { "epoch": 74.96, "grad_norm": 22.060726165771484, "learning_rate": 1.3951111111111112e-05, "loss": 0.7917, "step": 9370 }, { "epoch": 74.968, "grad_norm": 20.408153533935547, "learning_rate": 1.3946666666666666e-05, "loss": 0.8931, "step": 9371 }, { "epoch": 74.976, "grad_norm": 22.82866096496582, "learning_rate": 1.3942222222222223e-05, "loss": 0.7733, "step": 9372 }, { "epoch": 74.984, "grad_norm": 24.580583572387695, "learning_rate": 1.3937777777777778e-05, "loss": 0.6323, "step": 9373 }, { "epoch": 74.992, "grad_norm": 26.074424743652344, "learning_rate": 1.3933333333333334e-05, "loss": 0.7616, "step": 9374 }, { "epoch": 75.0, "grad_norm": 30.586183547973633, "learning_rate": 1.392888888888889e-05, "loss": 2.0791, "step": 9375 }, { "epoch": 75.0, "eval_loss": 0.9523876309394836, "eval_map": 0.4462, "eval_map_50": 0.7862, "eval_map_75": 0.4371, "eval_map_Coverall": 0.6381, "eval_map_Face_Shield": 0.5278, "eval_map_Gloves": 0.3732, "eval_map_Goggles": 0.252, "eval_map_Mask": 0.4397, "eval_map_large": 0.6692, "eval_map_medium": 0.3175, "eval_map_small": 0.3386, "eval_mar_1": 0.3416, "eval_mar_10": 0.5871, "eval_mar_100": 0.6002, "eval_mar_100_Coverall": 0.72, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.5098, "eval_mar_100_Goggles": 0.5188, "eval_mar_100_Mask": 0.5346, "eval_mar_large": 0.7876, "eval_mar_medium": 0.4939, "eval_mar_small": 0.4256, "eval_runtime": 1.0381, "eval_samples_per_second": 27.937, "eval_steps_per_second": 1.927, "step": 9375 }, { "epoch": 75.008, "grad_norm": 122.30094146728516, "learning_rate": 1.3924444444444446e-05, "loss": 1.4718, "step": 9376 }, { "epoch": 75.016, "grad_norm": 33.56639862060547, "learning_rate": 1.3919999999999999e-05, "loss": 0.8177, "step": 9377 }, { 
"epoch": 75.024, "grad_norm": 26.069408416748047, "learning_rate": 1.3915555555555557e-05, "loss": 0.6363, "step": 9378 }, { "epoch": 75.032, "grad_norm": 35.485477447509766, "learning_rate": 1.391111111111111e-05, "loss": 0.7238, "step": 9379 }, { "epoch": 75.04, "grad_norm": 17.079641342163086, "learning_rate": 1.3906666666666668e-05, "loss": 0.5902, "step": 9380 }, { "epoch": 75.048, "grad_norm": 17.5437068939209, "learning_rate": 1.3902222222222222e-05, "loss": 0.7395, "step": 9381 }, { "epoch": 75.056, "grad_norm": 15.768869400024414, "learning_rate": 1.3897777777777778e-05, "loss": 1.0393, "step": 9382 }, { "epoch": 75.064, "grad_norm": 20.479652404785156, "learning_rate": 1.3893333333333333e-05, "loss": 0.8644, "step": 9383 }, { "epoch": 75.072, "grad_norm": 15.605066299438477, "learning_rate": 1.388888888888889e-05, "loss": 0.5438, "step": 9384 }, { "epoch": 75.08, "grad_norm": 23.933650970458984, "learning_rate": 1.3884444444444444e-05, "loss": 0.6778, "step": 9385 }, { "epoch": 75.088, "grad_norm": 33.87880325317383, "learning_rate": 1.3880000000000001e-05, "loss": 1.6456, "step": 9386 }, { "epoch": 75.096, "grad_norm": 25.138906478881836, "learning_rate": 1.3875555555555556e-05, "loss": 0.7153, "step": 9387 }, { "epoch": 75.104, "grad_norm": 24.65760040283203, "learning_rate": 1.3871111111111112e-05, "loss": 0.7043, "step": 9388 }, { "epoch": 75.112, "grad_norm": 36.415679931640625, "learning_rate": 1.3866666666666667e-05, "loss": 0.6705, "step": 9389 }, { "epoch": 75.12, "grad_norm": 44.128089904785156, "learning_rate": 1.3862222222222224e-05, "loss": 0.9865, "step": 9390 }, { "epoch": 75.128, "grad_norm": 31.154949188232422, "learning_rate": 1.3857777777777779e-05, "loss": 0.8736, "step": 9391 }, { "epoch": 75.136, "grad_norm": 26.350210189819336, "learning_rate": 1.3853333333333335e-05, "loss": 0.7055, "step": 9392 }, { "epoch": 75.144, "grad_norm": 24.39931297302246, "learning_rate": 1.3848888888888888e-05, "loss": 1.1266, "step": 9393 }, { "epoch": 
75.152, "grad_norm": 23.328107833862305, "learning_rate": 1.3844444444444446e-05, "loss": 0.8856, "step": 9394 }, { "epoch": 75.16, "grad_norm": 40.358314514160156, "learning_rate": 1.384e-05, "loss": 2.0771, "step": 9395 }, { "epoch": 75.168, "grad_norm": 28.04669952392578, "learning_rate": 1.3835555555555558e-05, "loss": 0.8388, "step": 9396 }, { "epoch": 75.176, "grad_norm": 31.76305389404297, "learning_rate": 1.3831111111111111e-05, "loss": 0.8536, "step": 9397 }, { "epoch": 75.184, "grad_norm": 17.930713653564453, "learning_rate": 1.3826666666666668e-05, "loss": 1.4543, "step": 9398 }, { "epoch": 75.192, "grad_norm": 27.396203994750977, "learning_rate": 1.3822222222222222e-05, "loss": 1.0554, "step": 9399 }, { "epoch": 75.2, "grad_norm": 22.5127010345459, "learning_rate": 1.3817777777777779e-05, "loss": 0.6902, "step": 9400 }, { "epoch": 75.208, "grad_norm": 46.26622009277344, "learning_rate": 1.3813333333333334e-05, "loss": 1.2079, "step": 9401 }, { "epoch": 75.216, "grad_norm": 54.77993392944336, "learning_rate": 1.380888888888889e-05, "loss": 1.1346, "step": 9402 }, { "epoch": 75.224, "grad_norm": 18.859516143798828, "learning_rate": 1.3804444444444445e-05, "loss": 0.7363, "step": 9403 }, { "epoch": 75.232, "grad_norm": 20.78395652770996, "learning_rate": 1.3800000000000002e-05, "loss": 1.0795, "step": 9404 }, { "epoch": 75.24, "grad_norm": 35.276153564453125, "learning_rate": 1.3795555555555557e-05, "loss": 0.8023, "step": 9405 }, { "epoch": 75.248, "grad_norm": 13.378756523132324, "learning_rate": 1.3791111111111113e-05, "loss": 0.8792, "step": 9406 }, { "epoch": 75.256, "grad_norm": 21.97043800354004, "learning_rate": 1.3786666666666668e-05, "loss": 0.8542, "step": 9407 }, { "epoch": 75.264, "grad_norm": 15.127357482910156, "learning_rate": 1.3782222222222224e-05, "loss": 1.0833, "step": 9408 }, { "epoch": 75.272, "grad_norm": 17.6025447845459, "learning_rate": 1.3777777777777778e-05, "loss": 0.7752, "step": 9409 }, { "epoch": 75.28, "grad_norm": 
14.074918746948242, "learning_rate": 1.3773333333333336e-05, "loss": 0.9643, "step": 9410 }, { "epoch": 75.288, "grad_norm": 103.67351531982422, "learning_rate": 1.3768888888888889e-05, "loss": 0.6914, "step": 9411 }, { "epoch": 75.296, "grad_norm": 27.09979820251465, "learning_rate": 1.3764444444444447e-05, "loss": 1.3601, "step": 9412 }, { "epoch": 75.304, "grad_norm": 45.41838455200195, "learning_rate": 1.376e-05, "loss": 1.2079, "step": 9413 }, { "epoch": 75.312, "grad_norm": 36.51276397705078, "learning_rate": 1.3755555555555555e-05, "loss": 0.6021, "step": 9414 }, { "epoch": 75.32, "grad_norm": 71.1562728881836, "learning_rate": 1.3751111111111112e-05, "loss": 0.7948, "step": 9415 }, { "epoch": 75.328, "grad_norm": 21.41840934753418, "learning_rate": 1.3746666666666667e-05, "loss": 0.7042, "step": 9416 }, { "epoch": 75.336, "grad_norm": 25.045564651489258, "learning_rate": 1.3742222222222223e-05, "loss": 0.7943, "step": 9417 }, { "epoch": 75.344, "grad_norm": 14.130809783935547, "learning_rate": 1.3737777777777778e-05, "loss": 0.8742, "step": 9418 }, { "epoch": 75.352, "grad_norm": 25.538429260253906, "learning_rate": 1.3733333333333335e-05, "loss": 0.7655, "step": 9419 }, { "epoch": 75.36, "grad_norm": 79.3266830444336, "learning_rate": 1.3728888888888888e-05, "loss": 0.9922, "step": 9420 }, { "epoch": 75.368, "grad_norm": 17.63945198059082, "learning_rate": 1.3724444444444446e-05, "loss": 0.6739, "step": 9421 }, { "epoch": 75.376, "grad_norm": 29.63521957397461, "learning_rate": 1.3719999999999999e-05, "loss": 0.5932, "step": 9422 }, { "epoch": 75.384, "grad_norm": 38.84820556640625, "learning_rate": 1.3715555555555557e-05, "loss": 0.8099, "step": 9423 }, { "epoch": 75.392, "grad_norm": 25.900115966796875, "learning_rate": 1.371111111111111e-05, "loss": 1.3174, "step": 9424 }, { "epoch": 75.4, "grad_norm": 25.91193389892578, "learning_rate": 1.3706666666666667e-05, "loss": 0.908, "step": 9425 }, { "epoch": 75.408, "grad_norm": 23.25945472717285, 
"learning_rate": 1.3702222222222222e-05, "loss": 0.9098, "step": 9426 }, { "epoch": 75.416, "grad_norm": 20.95579719543457, "learning_rate": 1.3697777777777778e-05, "loss": 0.5703, "step": 9427 }, { "epoch": 75.424, "grad_norm": 37.93206024169922, "learning_rate": 1.3693333333333333e-05, "loss": 0.469, "step": 9428 }, { "epoch": 75.432, "grad_norm": 18.098546981811523, "learning_rate": 1.368888888888889e-05, "loss": 1.0983, "step": 9429 }, { "epoch": 75.44, "grad_norm": 14.077812194824219, "learning_rate": 1.3684444444444445e-05, "loss": 0.7679, "step": 9430 }, { "epoch": 75.448, "grad_norm": 24.338956832885742, "learning_rate": 1.3680000000000001e-05, "loss": 0.5871, "step": 9431 }, { "epoch": 75.456, "grad_norm": 34.09651184082031, "learning_rate": 1.3675555555555556e-05, "loss": 1.5914, "step": 9432 }, { "epoch": 75.464, "grad_norm": 37.633216857910156, "learning_rate": 1.3671111111111113e-05, "loss": 0.8576, "step": 9433 }, { "epoch": 75.472, "grad_norm": 27.732162475585938, "learning_rate": 1.3666666666666666e-05, "loss": 0.8268, "step": 9434 }, { "epoch": 75.48, "grad_norm": 19.62097930908203, "learning_rate": 1.3662222222222224e-05, "loss": 0.9636, "step": 9435 }, { "epoch": 75.488, "grad_norm": 65.18794250488281, "learning_rate": 1.3657777777777777e-05, "loss": 0.7489, "step": 9436 }, { "epoch": 75.496, "grad_norm": 27.15387725830078, "learning_rate": 1.3653333333333335e-05, "loss": 0.9457, "step": 9437 }, { "epoch": 75.504, "grad_norm": 23.986732482910156, "learning_rate": 1.3648888888888888e-05, "loss": 0.9472, "step": 9438 }, { "epoch": 75.512, "grad_norm": 19.309200286865234, "learning_rate": 1.3644444444444445e-05, "loss": 0.7037, "step": 9439 }, { "epoch": 75.52, "grad_norm": 21.46267318725586, "learning_rate": 1.364e-05, "loss": 0.7681, "step": 9440 }, { "epoch": 75.528, "grad_norm": 35.498722076416016, "learning_rate": 1.3635555555555556e-05, "loss": 0.7244, "step": 9441 }, { "epoch": 75.536, "grad_norm": 34.213016510009766, "learning_rate": 
1.3631111111111111e-05, "loss": 0.9575, "step": 9442 }, { "epoch": 75.544, "grad_norm": 22.42455291748047, "learning_rate": 1.3626666666666668e-05, "loss": 0.8215, "step": 9443 }, { "epoch": 75.552, "grad_norm": 18.833740234375, "learning_rate": 1.3622222222222223e-05, "loss": 0.9832, "step": 9444 }, { "epoch": 75.56, "grad_norm": 33.64168930053711, "learning_rate": 1.361777777777778e-05, "loss": 0.6761, "step": 9445 }, { "epoch": 75.568, "grad_norm": 18.736713409423828, "learning_rate": 1.3613333333333334e-05, "loss": 0.8985, "step": 9446 }, { "epoch": 75.576, "grad_norm": 18.629919052124023, "learning_rate": 1.360888888888889e-05, "loss": 0.8032, "step": 9447 }, { "epoch": 75.584, "grad_norm": 23.900959014892578, "learning_rate": 1.3604444444444445e-05, "loss": 0.7248, "step": 9448 }, { "epoch": 75.592, "grad_norm": 39.075469970703125, "learning_rate": 1.3600000000000002e-05, "loss": 1.6092, "step": 9449 }, { "epoch": 75.6, "grad_norm": 19.27458953857422, "learning_rate": 1.3595555555555555e-05, "loss": 0.9815, "step": 9450 }, { "epoch": 75.608, "grad_norm": 33.97594451904297, "learning_rate": 1.3591111111111113e-05, "loss": 0.9946, "step": 9451 }, { "epoch": 75.616, "grad_norm": 49.947322845458984, "learning_rate": 1.3586666666666666e-05, "loss": 0.8942, "step": 9452 }, { "epoch": 75.624, "grad_norm": 109.9508285522461, "learning_rate": 1.3582222222222225e-05, "loss": 1.14, "step": 9453 }, { "epoch": 75.632, "grad_norm": 22.30447769165039, "learning_rate": 1.3577777777777778e-05, "loss": 0.9646, "step": 9454 }, { "epoch": 75.64, "grad_norm": 20.00797462463379, "learning_rate": 1.3573333333333334e-05, "loss": 0.7818, "step": 9455 }, { "epoch": 75.648, "grad_norm": 11.758262634277344, "learning_rate": 1.356888888888889e-05, "loss": 0.6472, "step": 9456 }, { "epoch": 75.656, "grad_norm": 12.266282081604004, "learning_rate": 1.3564444444444446e-05, "loss": 0.7543, "step": 9457 }, { "epoch": 75.664, "grad_norm": 46.54283905029297, "learning_rate": 1.356e-05, "loss": 
0.6707, "step": 9458 }, { "epoch": 75.672, "grad_norm": 29.51721954345703, "learning_rate": 1.3555555555555557e-05, "loss": 1.2593, "step": 9459 }, { "epoch": 75.68, "grad_norm": 40.32920455932617, "learning_rate": 1.3551111111111112e-05, "loss": 0.9304, "step": 9460 }, { "epoch": 75.688, "grad_norm": 24.566120147705078, "learning_rate": 1.3546666666666669e-05, "loss": 1.1321, "step": 9461 }, { "epoch": 75.696, "grad_norm": 25.451017379760742, "learning_rate": 1.3542222222222223e-05, "loss": 0.7477, "step": 9462 }, { "epoch": 75.704, "grad_norm": 26.249929428100586, "learning_rate": 1.353777777777778e-05, "loss": 0.5937, "step": 9463 }, { "epoch": 75.712, "grad_norm": 59.48712921142578, "learning_rate": 1.3533333333333335e-05, "loss": 1.3384, "step": 9464 }, { "epoch": 75.72, "grad_norm": 161.043212890625, "learning_rate": 1.3528888888888888e-05, "loss": 1.0846, "step": 9465 }, { "epoch": 75.728, "grad_norm": 52.232357025146484, "learning_rate": 1.3524444444444444e-05, "loss": 1.2006, "step": 9466 }, { "epoch": 75.736, "grad_norm": 76.43598937988281, "learning_rate": 1.352e-05, "loss": 0.6632, "step": 9467 }, { "epoch": 75.744, "grad_norm": 33.5642204284668, "learning_rate": 1.3515555555555556e-05, "loss": 0.7991, "step": 9468 }, { "epoch": 75.752, "grad_norm": 32.100406646728516, "learning_rate": 1.351111111111111e-05, "loss": 0.8745, "step": 9469 }, { "epoch": 75.76, "grad_norm": 25.901103973388672, "learning_rate": 1.3506666666666667e-05, "loss": 0.8719, "step": 9470 }, { "epoch": 75.768, "grad_norm": 34.05946731567383, "learning_rate": 1.3502222222222222e-05, "loss": 0.75, "step": 9471 }, { "epoch": 75.776, "grad_norm": 148.22862243652344, "learning_rate": 1.3497777777777779e-05, "loss": 1.0692, "step": 9472 }, { "epoch": 75.784, "grad_norm": 33.82183837890625, "learning_rate": 1.3493333333333333e-05, "loss": 0.788, "step": 9473 }, { "epoch": 75.792, "grad_norm": 27.21419906616211, "learning_rate": 1.348888888888889e-05, "loss": 0.7587, "step": 9474 }, { 
"epoch": 75.8, "grad_norm": 25.69748306274414, "learning_rate": 1.3484444444444443e-05, "loss": 0.8843, "step": 9475 }, { "epoch": 75.808, "grad_norm": 20.998188018798828, "learning_rate": 1.3480000000000001e-05, "loss": 0.8854, "step": 9476 }, { "epoch": 75.816, "grad_norm": 67.34236145019531, "learning_rate": 1.3475555555555555e-05, "loss": 0.7565, "step": 9477 }, { "epoch": 75.824, "grad_norm": 23.322599411010742, "learning_rate": 1.3471111111111113e-05, "loss": 0.9052, "step": 9478 }, { "epoch": 75.832, "grad_norm": 15.061856269836426, "learning_rate": 1.3466666666666666e-05, "loss": 1.0658, "step": 9479 }, { "epoch": 75.84, "grad_norm": 15.313164710998535, "learning_rate": 1.3462222222222222e-05, "loss": 0.6774, "step": 9480 }, { "epoch": 75.848, "grad_norm": 38.442203521728516, "learning_rate": 1.3457777777777777e-05, "loss": 1.1548, "step": 9481 }, { "epoch": 75.856, "grad_norm": 17.646692276000977, "learning_rate": 1.3453333333333334e-05, "loss": 0.8521, "step": 9482 }, { "epoch": 75.864, "grad_norm": 65.9627685546875, "learning_rate": 1.3448888888888889e-05, "loss": 2.4352, "step": 9483 }, { "epoch": 75.872, "grad_norm": 25.750288009643555, "learning_rate": 1.3444444444444445e-05, "loss": 0.8607, "step": 9484 }, { "epoch": 75.88, "grad_norm": 45.208988189697266, "learning_rate": 1.344e-05, "loss": 0.7469, "step": 9485 }, { "epoch": 75.888, "grad_norm": 23.320819854736328, "learning_rate": 1.3435555555555557e-05, "loss": 0.9169, "step": 9486 }, { "epoch": 75.896, "grad_norm": 22.301368713378906, "learning_rate": 1.3431111111111111e-05, "loss": 1.1316, "step": 9487 }, { "epoch": 75.904, "grad_norm": 24.990116119384766, "learning_rate": 1.3426666666666668e-05, "loss": 0.755, "step": 9488 }, { "epoch": 75.912, "grad_norm": 15.207877159118652, "learning_rate": 1.3422222222222223e-05, "loss": 0.8487, "step": 9489 }, { "epoch": 75.92, "grad_norm": 22.71483039855957, "learning_rate": 1.341777777777778e-05, "loss": 0.8596, "step": 9490 }, { "epoch": 75.928, 
"grad_norm": 24.359052658081055, "learning_rate": 1.3413333333333333e-05, "loss": 0.7811, "step": 9491 }, { "epoch": 75.936, "grad_norm": 81.0102310180664, "learning_rate": 1.340888888888889e-05, "loss": 1.8579, "step": 9492 }, { "epoch": 75.944, "grad_norm": 24.875112533569336, "learning_rate": 1.3404444444444444e-05, "loss": 0.7343, "step": 9493 }, { "epoch": 75.952, "grad_norm": 24.801006317138672, "learning_rate": 1.3400000000000002e-05, "loss": 1.0514, "step": 9494 }, { "epoch": 75.96, "grad_norm": 23.246915817260742, "learning_rate": 1.3395555555555555e-05, "loss": 0.5025, "step": 9495 }, { "epoch": 75.968, "grad_norm": 52.48320388793945, "learning_rate": 1.3391111111111112e-05, "loss": 0.6471, "step": 9496 }, { "epoch": 75.976, "grad_norm": 29.36512565612793, "learning_rate": 1.3386666666666667e-05, "loss": 0.8521, "step": 9497 }, { "epoch": 75.984, "grad_norm": 14.485161781311035, "learning_rate": 1.3382222222222223e-05, "loss": 0.7768, "step": 9498 }, { "epoch": 75.992, "grad_norm": 16.230934143066406, "learning_rate": 1.3377777777777778e-05, "loss": 0.5563, "step": 9499 }, { "epoch": 76.0, "grad_norm": 11.604386329650879, "learning_rate": 1.3373333333333335e-05, "loss": 0.888, "step": 9500 }, { "epoch": 76.0, "eval_loss": 0.9505503177642822, "eval_map": 0.4679, "eval_map_50": 0.8, "eval_map_75": 0.482, "eval_map_Coverall": 0.6762, "eval_map_Face_Shield": 0.5352, "eval_map_Gloves": 0.3929, "eval_map_Goggles": 0.2667, "eval_map_Mask": 0.4684, "eval_map_large": 0.6676, "eval_map_medium": 0.3563, "eval_map_small": 0.3541, "eval_mar_1": 0.3461, "eval_mar_10": 0.5946, "eval_mar_100": 0.6049, "eval_mar_100_Coverall": 0.7467, "eval_mar_100_Face_Shield": 0.7059, "eval_mar_100_Gloves": 0.518, "eval_mar_100_Goggles": 0.5156, "eval_mar_100_Mask": 0.5385, "eval_mar_large": 0.7576, "eval_mar_medium": 0.499, "eval_mar_small": 0.4348, "eval_runtime": 0.9311, "eval_samples_per_second": 31.145, "eval_steps_per_second": 2.148, "step": 9500 }, { "epoch": 76.008, "grad_norm": 
17.36040496826172, "learning_rate": 1.336888888888889e-05, "loss": 0.4995, "step": 9501 }, { "epoch": 76.016, "grad_norm": 37.00054168701172, "learning_rate": 1.3364444444444446e-05, "loss": 0.8547, "step": 9502 }, { "epoch": 76.024, "grad_norm": 28.95455551147461, "learning_rate": 1.336e-05, "loss": 0.7594, "step": 9503 }, { "epoch": 76.032, "grad_norm": 80.27146911621094, "learning_rate": 1.3355555555555557e-05, "loss": 1.1153, "step": 9504 }, { "epoch": 76.04, "grad_norm": 28.92587661743164, "learning_rate": 1.3351111111111112e-05, "loss": 0.8377, "step": 9505 }, { "epoch": 76.048, "grad_norm": 16.63625717163086, "learning_rate": 1.3346666666666669e-05, "loss": 0.8112, "step": 9506 }, { "epoch": 76.056, "grad_norm": 48.82495880126953, "learning_rate": 1.3342222222222222e-05, "loss": 0.8923, "step": 9507 }, { "epoch": 76.064, "grad_norm": 19.151145935058594, "learning_rate": 1.333777777777778e-05, "loss": 0.9427, "step": 9508 }, { "epoch": 76.072, "grad_norm": 22.459800720214844, "learning_rate": 1.3333333333333333e-05, "loss": 1.0507, "step": 9509 }, { "epoch": 76.08, "grad_norm": 21.845504760742188, "learning_rate": 1.3328888888888892e-05, "loss": 0.8261, "step": 9510 }, { "epoch": 76.088, "grad_norm": 25.07901382446289, "learning_rate": 1.3324444444444445e-05, "loss": 0.8092, "step": 9511 }, { "epoch": 76.096, "grad_norm": 17.75140953063965, "learning_rate": 1.3320000000000001e-05, "loss": 0.8511, "step": 9512 }, { "epoch": 76.104, "grad_norm": 56.094573974609375, "learning_rate": 1.3315555555555556e-05, "loss": 1.2142, "step": 9513 }, { "epoch": 76.112, "grad_norm": 22.49619483947754, "learning_rate": 1.3311111111111113e-05, "loss": 1.0597, "step": 9514 }, { "epoch": 76.12, "grad_norm": 23.28896141052246, "learning_rate": 1.3306666666666667e-05, "loss": 1.0331, "step": 9515 }, { "epoch": 76.128, "grad_norm": 31.79374885559082, "learning_rate": 1.3302222222222222e-05, "loss": 1.7626, "step": 9516 }, { "epoch": 76.136, "grad_norm": 24.199033737182617, 
"learning_rate": 1.3297777777777779e-05, "loss": 1.1916, "step": 9517 }, { "epoch": 76.144, "grad_norm": 19.91318702697754, "learning_rate": 1.3293333333333332e-05, "loss": 0.6804, "step": 9518 }, { "epoch": 76.152, "grad_norm": 12.231369018554688, "learning_rate": 1.328888888888889e-05, "loss": 0.8843, "step": 9519 }, { "epoch": 76.16, "grad_norm": 28.225116729736328, "learning_rate": 1.3284444444444443e-05, "loss": 0.9493, "step": 9520 }, { "epoch": 76.168, "grad_norm": 15.36866569519043, "learning_rate": 1.3280000000000002e-05, "loss": 1.8632, "step": 9521 }, { "epoch": 76.176, "grad_norm": 19.798175811767578, "learning_rate": 1.3275555555555555e-05, "loss": 0.782, "step": 9522 }, { "epoch": 76.184, "grad_norm": 42.36103820800781, "learning_rate": 1.3271111111111111e-05, "loss": 0.7426, "step": 9523 }, { "epoch": 76.192, "grad_norm": 36.894596099853516, "learning_rate": 1.3266666666666666e-05, "loss": 2.1441, "step": 9524 }, { "epoch": 76.2, "grad_norm": 19.934608459472656, "learning_rate": 1.3262222222222223e-05, "loss": 0.967, "step": 9525 }, { "epoch": 76.208, "grad_norm": 22.654075622558594, "learning_rate": 1.3257777777777778e-05, "loss": 0.9269, "step": 9526 }, { "epoch": 76.216, "grad_norm": 43.41971969604492, "learning_rate": 1.3253333333333334e-05, "loss": 1.0548, "step": 9527 }, { "epoch": 76.224, "grad_norm": 29.18045425415039, "learning_rate": 1.3248888888888889e-05, "loss": 0.8621, "step": 9528 }, { "epoch": 76.232, "grad_norm": 24.826906204223633, "learning_rate": 1.3244444444444445e-05, "loss": 0.8206, "step": 9529 }, { "epoch": 76.24, "grad_norm": 19.172758102416992, "learning_rate": 1.324e-05, "loss": 0.9317, "step": 9530 }, { "epoch": 76.248, "grad_norm": 39.98697280883789, "learning_rate": 1.3235555555555557e-05, "loss": 1.0548, "step": 9531 }, { "epoch": 76.256, "grad_norm": 18.801738739013672, "learning_rate": 1.323111111111111e-05, "loss": 0.5319, "step": 9532 }, { "epoch": 76.264, "grad_norm": 35.211822509765625, "learning_rate": 
1.3226666666666668e-05, "loss": 0.5585, "step": 9533 }, { "epoch": 76.272, "grad_norm": 34.843597412109375, "learning_rate": 1.3222222222222221e-05, "loss": 0.8836, "step": 9534 }, { "epoch": 76.28, "grad_norm": 506.6618347167969, "learning_rate": 1.321777777777778e-05, "loss": 0.739, "step": 9535 }, { "epoch": 76.288, "grad_norm": 25.20528221130371, "learning_rate": 1.3213333333333333e-05, "loss": 0.9041, "step": 9536 }, { "epoch": 76.296, "grad_norm": 25.846460342407227, "learning_rate": 1.320888888888889e-05, "loss": 0.9236, "step": 9537 }, { "epoch": 76.304, "grad_norm": 16.528297424316406, "learning_rate": 1.3204444444444444e-05, "loss": 1.0608, "step": 9538 }, { "epoch": 76.312, "grad_norm": 41.03108596801758, "learning_rate": 1.32e-05, "loss": 1.109, "step": 9539 }, { "epoch": 76.32, "grad_norm": 22.412311553955078, "learning_rate": 1.3195555555555556e-05, "loss": 0.667, "step": 9540 }, { "epoch": 76.328, "grad_norm": 24.00981330871582, "learning_rate": 1.3191111111111112e-05, "loss": 0.8687, "step": 9541 }, { "epoch": 76.336, "grad_norm": 24.071365356445312, "learning_rate": 1.3186666666666667e-05, "loss": 0.8883, "step": 9542 }, { "epoch": 76.344, "grad_norm": 16.13439178466797, "learning_rate": 1.3182222222222223e-05, "loss": 0.7843, "step": 9543 }, { "epoch": 76.352, "grad_norm": 25.08465576171875, "learning_rate": 1.3177777777777778e-05, "loss": 0.6906, "step": 9544 }, { "epoch": 76.36, "grad_norm": 16.1309814453125, "learning_rate": 1.3173333333333335e-05, "loss": 0.6284, "step": 9545 }, { "epoch": 76.368, "grad_norm": 15.237924575805664, "learning_rate": 1.316888888888889e-05, "loss": 0.7618, "step": 9546 }, { "epoch": 76.376, "grad_norm": 21.129756927490234, "learning_rate": 1.3164444444444446e-05, "loss": 0.9094, "step": 9547 }, { "epoch": 76.384, "grad_norm": 18.018781661987305, "learning_rate": 1.316e-05, "loss": 0.972, "step": 9548 }, { "epoch": 76.392, "grad_norm": 17.642593383789062, "learning_rate": 1.3155555555555558e-05, "loss": 0.8039, 
"step": 9549 }, { "epoch": 76.4, "grad_norm": 55.92747116088867, "learning_rate": 1.315111111111111e-05, "loss": 1.239, "step": 9550 }, { "epoch": 76.408, "grad_norm": 17.391372680664062, "learning_rate": 1.3146666666666669e-05, "loss": 1.0054, "step": 9551 }, { "epoch": 76.416, "grad_norm": 23.15078353881836, "learning_rate": 1.3142222222222222e-05, "loss": 0.9558, "step": 9552 }, { "epoch": 76.424, "grad_norm": 26.645143508911133, "learning_rate": 1.3137777777777779e-05, "loss": 0.5841, "step": 9553 }, { "epoch": 76.432, "grad_norm": 23.59929847717285, "learning_rate": 1.3133333333333334e-05, "loss": 1.0165, "step": 9554 }, { "epoch": 76.44, "grad_norm": 26.15386199951172, "learning_rate": 1.312888888888889e-05, "loss": 0.7078, "step": 9555 }, { "epoch": 76.448, "grad_norm": 19.869857788085938, "learning_rate": 1.3124444444444445e-05, "loss": 0.6888, "step": 9556 }, { "epoch": 76.456, "grad_norm": 40.248374938964844, "learning_rate": 1.3120000000000001e-05, "loss": 0.7813, "step": 9557 }, { "epoch": 76.464, "grad_norm": 12.117414474487305, "learning_rate": 1.3115555555555556e-05, "loss": 0.8332, "step": 9558 }, { "epoch": 76.472, "grad_norm": 49.4061279296875, "learning_rate": 1.3111111111111113e-05, "loss": 0.9697, "step": 9559 }, { "epoch": 76.48, "grad_norm": 22.84295082092285, "learning_rate": 1.3106666666666668e-05, "loss": 0.6392, "step": 9560 }, { "epoch": 76.488, "grad_norm": 18.666847229003906, "learning_rate": 1.3102222222222224e-05, "loss": 0.9497, "step": 9561 }, { "epoch": 76.496, "grad_norm": 22.72048568725586, "learning_rate": 1.3097777777777779e-05, "loss": 0.7005, "step": 9562 }, { "epoch": 76.504, "grad_norm": 29.197362899780273, "learning_rate": 1.3093333333333336e-05, "loss": 1.0333, "step": 9563 }, { "epoch": 76.512, "grad_norm": 23.78094482421875, "learning_rate": 1.3088888888888889e-05, "loss": 0.8506, "step": 9564 }, { "epoch": 76.52, "grad_norm": 14.263127326965332, "learning_rate": 1.3084444444444447e-05, "loss": 1.1142, "step": 9565 }, 
{ "epoch": 76.528, "grad_norm": 35.716712951660156, "learning_rate": 1.308e-05, "loss": 0.7116, "step": 9566 }, { "epoch": 76.536, "grad_norm": 18.257816314697266, "learning_rate": 1.3075555555555558e-05, "loss": 0.6315, "step": 9567 }, { "epoch": 76.544, "grad_norm": 18.192384719848633, "learning_rate": 1.3071111111111112e-05, "loss": 0.9056, "step": 9568 }, { "epoch": 76.552, "grad_norm": 32.82132339477539, "learning_rate": 1.3066666666666666e-05, "loss": 0.6139, "step": 9569 }, { "epoch": 76.56, "grad_norm": 22.675594329833984, "learning_rate": 1.3062222222222223e-05, "loss": 0.9465, "step": 9570 }, { "epoch": 76.568, "grad_norm": 28.273740768432617, "learning_rate": 1.3057777777777778e-05, "loss": 0.7399, "step": 9571 }, { "epoch": 76.576, "grad_norm": 14.906533241271973, "learning_rate": 1.3053333333333334e-05, "loss": 0.6965, "step": 9572 }, { "epoch": 76.584, "grad_norm": 19.476730346679688, "learning_rate": 1.3048888888888887e-05, "loss": 0.9804, "step": 9573 }, { "epoch": 76.592, "grad_norm": 103.10712432861328, "learning_rate": 1.3044444444444446e-05, "loss": 0.8876, "step": 9574 }, { "epoch": 76.6, "grad_norm": 14.038655281066895, "learning_rate": 1.3039999999999999e-05, "loss": 1.0112, "step": 9575 }, { "epoch": 76.608, "grad_norm": 30.025554656982422, "learning_rate": 1.3035555555555557e-05, "loss": 0.7503, "step": 9576 }, { "epoch": 76.616, "grad_norm": 22.192211151123047, "learning_rate": 1.303111111111111e-05, "loss": 1.0146, "step": 9577 }, { "epoch": 76.624, "grad_norm": 65.62073516845703, "learning_rate": 1.3026666666666667e-05, "loss": 1.0731, "step": 9578 }, { "epoch": 76.632, "grad_norm": 21.71491241455078, "learning_rate": 1.3022222222222222e-05, "loss": 0.9902, "step": 9579 }, { "epoch": 76.64, "grad_norm": 24.776092529296875, "learning_rate": 1.3017777777777778e-05, "loss": 0.7903, "step": 9580 }, { "epoch": 76.648, "grad_norm": 25.404468536376953, "learning_rate": 1.3013333333333333e-05, "loss": 1.004, "step": 9581 }, { "epoch": 76.656, 
"grad_norm": 51.83455276489258, "learning_rate": 1.300888888888889e-05, "loss": 0.6482, "step": 9582 }, { "epoch": 76.664, "grad_norm": 33.40376663208008, "learning_rate": 1.3004444444444444e-05, "loss": 0.8905, "step": 9583 }, { "epoch": 76.672, "grad_norm": 20.637916564941406, "learning_rate": 1.3000000000000001e-05, "loss": 0.7229, "step": 9584 }, { "epoch": 76.68, "grad_norm": 30.628921508789062, "learning_rate": 1.2995555555555556e-05, "loss": 0.7753, "step": 9585 }, { "epoch": 76.688, "grad_norm": 58.585731506347656, "learning_rate": 1.2991111111111112e-05, "loss": 0.65, "step": 9586 }, { "epoch": 76.696, "grad_norm": 23.227684020996094, "learning_rate": 1.2986666666666667e-05, "loss": 0.9183, "step": 9587 }, { "epoch": 76.704, "grad_norm": 17.518070220947266, "learning_rate": 1.2982222222222224e-05, "loss": 1.1717, "step": 9588 }, { "epoch": 76.712, "grad_norm": 22.78864288330078, "learning_rate": 1.2977777777777777e-05, "loss": 0.9937, "step": 9589 }, { "epoch": 76.72, "grad_norm": 15.550832748413086, "learning_rate": 1.2973333333333335e-05, "loss": 0.5976, "step": 9590 }, { "epoch": 76.728, "grad_norm": 40.02885818481445, "learning_rate": 1.2968888888888888e-05, "loss": 0.6809, "step": 9591 }, { "epoch": 76.736, "grad_norm": 19.538436889648438, "learning_rate": 1.2964444444444446e-05, "loss": 0.9768, "step": 9592 }, { "epoch": 76.744, "grad_norm": 49.135276794433594, "learning_rate": 1.296e-05, "loss": 0.7874, "step": 9593 }, { "epoch": 76.752, "grad_norm": 42.25922393798828, "learning_rate": 1.2955555555555556e-05, "loss": 1.336, "step": 9594 }, { "epoch": 76.76, "grad_norm": 16.23433494567871, "learning_rate": 1.2951111111111111e-05, "loss": 1.158, "step": 9595 }, { "epoch": 76.768, "grad_norm": 20.638992309570312, "learning_rate": 1.2946666666666668e-05, "loss": 0.4592, "step": 9596 }, { "epoch": 76.776, "grad_norm": 23.708660125732422, "learning_rate": 1.2942222222222222e-05, "loss": 0.8401, "step": 9597 }, { "epoch": 76.784, "grad_norm": 
31.80971908569336, "learning_rate": 1.2937777777777779e-05, "loss": 0.7186, "step": 9598 }, { "epoch": 76.792, "grad_norm": 73.031005859375, "learning_rate": 1.2933333333333334e-05, "loss": 0.4864, "step": 9599 }, { "epoch": 76.8, "grad_norm": 22.77275848388672, "learning_rate": 1.292888888888889e-05, "loss": 1.067, "step": 9600 }, { "epoch": 76.808, "grad_norm": 17.53231430053711, "learning_rate": 1.2924444444444445e-05, "loss": 1.0024, "step": 9601 }, { "epoch": 76.816, "grad_norm": 14.533903121948242, "learning_rate": 1.2920000000000002e-05, "loss": 0.8099, "step": 9602 }, { "epoch": 76.824, "grad_norm": 41.771934509277344, "learning_rate": 1.2915555555555557e-05, "loss": 2.0158, "step": 9603 }, { "epoch": 76.832, "grad_norm": 27.060457229614258, "learning_rate": 1.2911111111111113e-05, "loss": 1.1928, "step": 9604 }, { "epoch": 76.84, "grad_norm": 22.24116325378418, "learning_rate": 1.2906666666666666e-05, "loss": 1.1415, "step": 9605 }, { "epoch": 76.848, "grad_norm": 21.96588897705078, "learning_rate": 1.2902222222222224e-05, "loss": 0.7569, "step": 9606 }, { "epoch": 76.856, "grad_norm": 38.338897705078125, "learning_rate": 1.2897777777777778e-05, "loss": 0.6624, "step": 9607 }, { "epoch": 76.864, "grad_norm": 17.916845321655273, "learning_rate": 1.2893333333333336e-05, "loss": 1.9545, "step": 9608 }, { "epoch": 76.872, "grad_norm": 32.1710319519043, "learning_rate": 1.2888888888888889e-05, "loss": 1.1798, "step": 9609 }, { "epoch": 76.88, "grad_norm": 25.37958335876465, "learning_rate": 1.2884444444444446e-05, "loss": 1.5592, "step": 9610 }, { "epoch": 76.888, "grad_norm": 36.43132781982422, "learning_rate": 1.288e-05, "loss": 0.7755, "step": 9611 }, { "epoch": 76.896, "grad_norm": 9.33376693725586, "learning_rate": 1.2875555555555557e-05, "loss": 0.7278, "step": 9612 }, { "epoch": 76.904, "grad_norm": 23.45598030090332, "learning_rate": 1.2871111111111112e-05, "loss": 0.8413, "step": 9613 }, { "epoch": 76.912, "grad_norm": 17.5260066986084, 
"learning_rate": 1.2866666666666668e-05, "loss": 0.7424, "step": 9614 }, { "epoch": 76.92, "grad_norm": 24.645322799682617, "learning_rate": 1.2862222222222223e-05, "loss": 1.0376, "step": 9615 }, { "epoch": 76.928, "grad_norm": 218.3150634765625, "learning_rate": 1.285777777777778e-05, "loss": 1.0484, "step": 9616 }, { "epoch": 76.936, "grad_norm": 10.808964729309082, "learning_rate": 1.2853333333333335e-05, "loss": 0.8436, "step": 9617 }, { "epoch": 76.944, "grad_norm": 87.20824432373047, "learning_rate": 1.2848888888888891e-05, "loss": 0.752, "step": 9618 }, { "epoch": 76.952, "grad_norm": 25.883882522583008, "learning_rate": 1.2844444444444446e-05, "loss": 0.764, "step": 9619 }, { "epoch": 76.96, "grad_norm": 51.203678131103516, "learning_rate": 1.2839999999999999e-05, "loss": 0.8537, "step": 9620 }, { "epoch": 76.968, "grad_norm": 29.507179260253906, "learning_rate": 1.2835555555555556e-05, "loss": 1.0962, "step": 9621 }, { "epoch": 76.976, "grad_norm": 21.84744644165039, "learning_rate": 1.283111111111111e-05, "loss": 0.7213, "step": 9622 }, { "epoch": 76.984, "grad_norm": 22.33560562133789, "learning_rate": 1.2826666666666667e-05, "loss": 0.6499, "step": 9623 }, { "epoch": 76.992, "grad_norm": 55.31836700439453, "learning_rate": 1.2822222222222222e-05, "loss": 0.5196, "step": 9624 }, { "epoch": 77.0, "grad_norm": 23.734214782714844, "learning_rate": 1.2817777777777778e-05, "loss": 0.9148, "step": 9625 }, { "epoch": 77.0, "eval_loss": 0.9320967197418213, "eval_map": 0.4811, "eval_map_50": 0.8106, "eval_map_75": 0.4947, "eval_map_Coverall": 0.6951, "eval_map_Face_Shield": 0.5868, "eval_map_Gloves": 0.3849, "eval_map_Goggles": 0.2802, "eval_map_Mask": 0.4583, "eval_map_large": 0.6726, "eval_map_medium": 0.3681, "eval_map_small": 0.3842, "eval_mar_1": 0.3568, "eval_mar_10": 0.6055, "eval_mar_100": 0.6182, "eval_mar_100_Coverall": 0.7644, "eval_mar_100_Face_Shield": 0.7588, "eval_mar_100_Gloves": 0.5295, "eval_mar_100_Goggles": 0.4938, "eval_mar_100_Mask": 
0.5442, "eval_mar_large": 0.7587, "eval_mar_medium": 0.5298, "eval_mar_small": 0.475, "eval_runtime": 0.9348, "eval_samples_per_second": 31.021, "eval_steps_per_second": 2.139, "step": 9625 }, { "epoch": 77.008, "grad_norm": 14.120348930358887, "learning_rate": 1.2813333333333333e-05, "loss": 0.6992, "step": 9626 }, { "epoch": 77.016, "grad_norm": 17.64232635498047, "learning_rate": 1.280888888888889e-05, "loss": 1.0896, "step": 9627 }, { "epoch": 77.024, "grad_norm": 92.51974487304688, "learning_rate": 1.2804444444444445e-05, "loss": 0.9331, "step": 9628 }, { "epoch": 77.032, "grad_norm": 22.08274269104004, "learning_rate": 1.2800000000000001e-05, "loss": 1.1759, "step": 9629 }, { "epoch": 77.04, "grad_norm": 26.333393096923828, "learning_rate": 1.2795555555555554e-05, "loss": 1.2213, "step": 9630 }, { "epoch": 77.048, "grad_norm": 30.886348724365234, "learning_rate": 1.2791111111111112e-05, "loss": 0.7194, "step": 9631 }, { "epoch": 77.056, "grad_norm": 16.789043426513672, "learning_rate": 1.2786666666666666e-05, "loss": 0.8574, "step": 9632 }, { "epoch": 77.064, "grad_norm": 20.194507598876953, "learning_rate": 1.2782222222222224e-05, "loss": 0.4541, "step": 9633 }, { "epoch": 77.072, "grad_norm": 23.299671173095703, "learning_rate": 1.2777777777777777e-05, "loss": 0.8008, "step": 9634 }, { "epoch": 77.08, "grad_norm": 21.660518646240234, "learning_rate": 1.2773333333333334e-05, "loss": 0.8304, "step": 9635 }, { "epoch": 77.088, "grad_norm": 20.441129684448242, "learning_rate": 1.2768888888888888e-05, "loss": 0.7063, "step": 9636 }, { "epoch": 77.096, "grad_norm": 20.753019332885742, "learning_rate": 1.2764444444444445e-05, "loss": 0.6202, "step": 9637 }, { "epoch": 77.104, "grad_norm": 20.5952091217041, "learning_rate": 1.276e-05, "loss": 0.7794, "step": 9638 }, { "epoch": 77.112, "grad_norm": 16.948570251464844, "learning_rate": 1.2755555555555556e-05, "loss": 1.0728, "step": 9639 }, { "epoch": 77.12, "grad_norm": 31.514816284179688, "learning_rate": 
1.2751111111111111e-05, "loss": 0.6982, "step": 9640 }, { "epoch": 77.128, "grad_norm": 45.895835876464844, "learning_rate": 1.2746666666666668e-05, "loss": 0.6316, "step": 9641 }, { "epoch": 77.136, "grad_norm": 26.412809371948242, "learning_rate": 1.2742222222222223e-05, "loss": 0.7725, "step": 9642 }, { "epoch": 77.144, "grad_norm": 20.88157081604004, "learning_rate": 1.2737777777777779e-05, "loss": 0.9105, "step": 9643 }, { "epoch": 77.152, "grad_norm": 22.77733039855957, "learning_rate": 1.2733333333333334e-05, "loss": 0.7958, "step": 9644 }, { "epoch": 77.16, "grad_norm": 28.466259002685547, "learning_rate": 1.272888888888889e-05, "loss": 1.2374, "step": 9645 }, { "epoch": 77.168, "grad_norm": 12.530088424682617, "learning_rate": 1.2724444444444444e-05, "loss": 1.0372, "step": 9646 }, { "epoch": 77.176, "grad_norm": 14.70002555847168, "learning_rate": 1.2720000000000002e-05, "loss": 0.903, "step": 9647 }, { "epoch": 77.184, "grad_norm": 27.14835548400879, "learning_rate": 1.2715555555555555e-05, "loss": 0.7515, "step": 9648 }, { "epoch": 77.192, "grad_norm": 24.9317569732666, "learning_rate": 1.2711111111111113e-05, "loss": 0.7671, "step": 9649 }, { "epoch": 77.2, "grad_norm": 36.82010269165039, "learning_rate": 1.2706666666666666e-05, "loss": 0.6372, "step": 9650 }, { "epoch": 77.208, "grad_norm": 15.36243724822998, "learning_rate": 1.2702222222222223e-05, "loss": 0.779, "step": 9651 }, { "epoch": 77.216, "grad_norm": 24.733144760131836, "learning_rate": 1.2697777777777778e-05, "loss": 1.3619, "step": 9652 }, { "epoch": 77.224, "grad_norm": 25.662548065185547, "learning_rate": 1.2693333333333334e-05, "loss": 0.8905, "step": 9653 }, { "epoch": 77.232, "grad_norm": 17.62268829345703, "learning_rate": 1.268888888888889e-05, "loss": 0.767, "step": 9654 }, { "epoch": 77.24, "grad_norm": 16.686601638793945, "learning_rate": 1.2684444444444446e-05, "loss": 1.0668, "step": 9655 }, { "epoch": 77.248, "grad_norm": 27.274728775024414, "learning_rate": 1.268e-05, 
"loss": 0.8775, "step": 9656 }, { "epoch": 77.256, "grad_norm": 15.13366413116455, "learning_rate": 1.2675555555555557e-05, "loss": 0.6216, "step": 9657 }, { "epoch": 77.264, "grad_norm": 19.443416595458984, "learning_rate": 1.2671111111111112e-05, "loss": 0.786, "step": 9658 }, { "epoch": 77.272, "grad_norm": 40.92489242553711, "learning_rate": 1.2666666666666668e-05, "loss": 0.8097, "step": 9659 }, { "epoch": 77.28, "grad_norm": 45.3271598815918, "learning_rate": 1.2662222222222223e-05, "loss": 1.25, "step": 9660 }, { "epoch": 77.288, "grad_norm": 19.809741973876953, "learning_rate": 1.265777777777778e-05, "loss": 0.8717, "step": 9661 }, { "epoch": 77.296, "grad_norm": 19.287010192871094, "learning_rate": 1.2653333333333333e-05, "loss": 0.7692, "step": 9662 }, { "epoch": 77.304, "grad_norm": 51.58768844604492, "learning_rate": 1.2648888888888891e-05, "loss": 1.0564, "step": 9663 }, { "epoch": 77.312, "grad_norm": 26.470699310302734, "learning_rate": 1.2644444444444444e-05, "loss": 0.6719, "step": 9664 }, { "epoch": 77.32, "grad_norm": 38.68220901489258, "learning_rate": 1.2640000000000003e-05, "loss": 0.8504, "step": 9665 }, { "epoch": 77.328, "grad_norm": 22.767139434814453, "learning_rate": 1.2635555555555556e-05, "loss": 0.7049, "step": 9666 }, { "epoch": 77.336, "grad_norm": 34.291358947753906, "learning_rate": 1.2631111111111112e-05, "loss": 0.8702, "step": 9667 }, { "epoch": 77.344, "grad_norm": 16.662649154663086, "learning_rate": 1.2626666666666667e-05, "loss": 1.3155, "step": 9668 }, { "epoch": 77.352, "grad_norm": 38.184513092041016, "learning_rate": 1.2622222222222224e-05, "loss": 1.7023, "step": 9669 }, { "epoch": 77.36, "grad_norm": 40.10428237915039, "learning_rate": 1.2617777777777779e-05, "loss": 0.7865, "step": 9670 }, { "epoch": 77.368, "grad_norm": 31.539093017578125, "learning_rate": 1.2613333333333332e-05, "loss": 0.833, "step": 9671 }, { "epoch": 77.376, "grad_norm": 17.016883850097656, "learning_rate": 1.260888888888889e-05, "loss": 0.72, 
"step": 9672 }, { "epoch": 77.384, "grad_norm": 44.82206344604492, "learning_rate": 1.2604444444444443e-05, "loss": 0.5777, "step": 9673 }, { "epoch": 77.392, "grad_norm": 32.49629211425781, "learning_rate": 1.2600000000000001e-05, "loss": 0.7453, "step": 9674 }, { "epoch": 77.4, "grad_norm": 35.2049446105957, "learning_rate": 1.2595555555555554e-05, "loss": 0.8564, "step": 9675 }, { "epoch": 77.408, "grad_norm": 20.46246337890625, "learning_rate": 1.2591111111111111e-05, "loss": 0.8448, "step": 9676 }, { "epoch": 77.416, "grad_norm": 52.12628936767578, "learning_rate": 1.2586666666666666e-05, "loss": 0.8124, "step": 9677 }, { "epoch": 77.424, "grad_norm": 28.897003173828125, "learning_rate": 1.2582222222222222e-05, "loss": 0.739, "step": 9678 }, { "epoch": 77.432, "grad_norm": 39.32246780395508, "learning_rate": 1.2577777777777777e-05, "loss": 0.8271, "step": 9679 }, { "epoch": 77.44, "grad_norm": 32.01557159423828, "learning_rate": 1.2573333333333334e-05, "loss": 0.9373, "step": 9680 }, { "epoch": 77.448, "grad_norm": 30.763193130493164, "learning_rate": 1.2568888888888889e-05, "loss": 0.8228, "step": 9681 }, { "epoch": 77.456, "grad_norm": 29.139400482177734, "learning_rate": 1.2564444444444445e-05, "loss": 0.5728, "step": 9682 }, { "epoch": 77.464, "grad_norm": 20.467241287231445, "learning_rate": 1.256e-05, "loss": 0.8693, "step": 9683 }, { "epoch": 77.472, "grad_norm": 28.102907180786133, "learning_rate": 1.2555555555555557e-05, "loss": 1.1478, "step": 9684 }, { "epoch": 77.48, "grad_norm": 34.35525894165039, "learning_rate": 1.2551111111111111e-05, "loss": 0.8273, "step": 9685 }, { "epoch": 77.488, "grad_norm": 21.368328094482422, "learning_rate": 1.2546666666666668e-05, "loss": 0.7364, "step": 9686 }, { "epoch": 77.496, "grad_norm": 26.62451934814453, "learning_rate": 1.2542222222222221e-05, "loss": 0.5996, "step": 9687 }, { "epoch": 77.504, "grad_norm": 32.066707611083984, "learning_rate": 1.253777777777778e-05, "loss": 0.9394, "step": 9688 }, { "epoch": 
77.512, "grad_norm": 23.12519073486328, "learning_rate": 1.2533333333333332e-05, "loss": 1.2379, "step": 9689 }, { "epoch": 77.52, "grad_norm": 36.310848236083984, "learning_rate": 1.252888888888889e-05, "loss": 0.9558, "step": 9690 }, { "epoch": 77.528, "grad_norm": 29.0869140625, "learning_rate": 1.2524444444444444e-05, "loss": 1.1155, "step": 9691 }, { "epoch": 77.536, "grad_norm": 26.943037033081055, "learning_rate": 1.252e-05, "loss": 0.5817, "step": 9692 }, { "epoch": 77.544, "grad_norm": 17.716272354125977, "learning_rate": 1.2515555555555555e-05, "loss": 1.0618, "step": 9693 }, { "epoch": 77.552, "grad_norm": 20.061969757080078, "learning_rate": 1.2511111111111112e-05, "loss": 1.5832, "step": 9694 }, { "epoch": 77.56, "grad_norm": 18.965843200683594, "learning_rate": 1.2506666666666667e-05, "loss": 0.8569, "step": 9695 }, { "epoch": 77.568, "grad_norm": 29.19576644897461, "learning_rate": 1.2502222222222223e-05, "loss": 0.939, "step": 9696 }, { "epoch": 77.576, "grad_norm": 14.465849876403809, "learning_rate": 1.2497777777777778e-05, "loss": 0.6867, "step": 9697 }, { "epoch": 77.584, "grad_norm": 14.14419174194336, "learning_rate": 1.2493333333333333e-05, "loss": 0.6751, "step": 9698 }, { "epoch": 77.592, "grad_norm": 16.9014835357666, "learning_rate": 1.248888888888889e-05, "loss": 0.6754, "step": 9699 }, { "epoch": 77.6, "grad_norm": 45.870357513427734, "learning_rate": 1.2484444444444444e-05, "loss": 2.5969, "step": 9700 }, { "epoch": 77.608, "grad_norm": 18.432205200195312, "learning_rate": 1.248e-05, "loss": 0.7012, "step": 9701 }, { "epoch": 77.616, "grad_norm": 17.518783569335938, "learning_rate": 1.2475555555555556e-05, "loss": 1.1966, "step": 9702 }, { "epoch": 77.624, "grad_norm": 21.63229751586914, "learning_rate": 1.247111111111111e-05, "loss": 0.8732, "step": 9703 }, { "epoch": 77.632, "grad_norm": 29.330825805664062, "learning_rate": 1.2466666666666667e-05, "loss": 0.8316, "step": 9704 }, { "epoch": 77.64, "grad_norm": 30.135263442993164, 
"learning_rate": 1.2462222222222222e-05, "loss": 0.7464, "step": 9705 }, { "epoch": 77.648, "grad_norm": 51.00952911376953, "learning_rate": 1.2457777777777778e-05, "loss": 0.5423, "step": 9706 }, { "epoch": 77.656, "grad_norm": 22.500673294067383, "learning_rate": 1.2453333333333333e-05, "loss": 1.026, "step": 9707 }, { "epoch": 77.664, "grad_norm": 25.56868553161621, "learning_rate": 1.244888888888889e-05, "loss": 1.1192, "step": 9708 }, { "epoch": 77.672, "grad_norm": 25.50639533996582, "learning_rate": 1.2444444444444445e-05, "loss": 1.3521, "step": 9709 }, { "epoch": 77.68, "grad_norm": 15.252968788146973, "learning_rate": 1.244e-05, "loss": 0.6489, "step": 9710 }, { "epoch": 77.688, "grad_norm": 31.913698196411133, "learning_rate": 1.2435555555555556e-05, "loss": 0.8457, "step": 9711 }, { "epoch": 77.696, "grad_norm": 170.32901000976562, "learning_rate": 1.2431111111111111e-05, "loss": 1.2413, "step": 9712 }, { "epoch": 77.704, "grad_norm": 48.925262451171875, "learning_rate": 1.2426666666666667e-05, "loss": 1.2776, "step": 9713 }, { "epoch": 77.712, "grad_norm": 16.86517333984375, "learning_rate": 1.2422222222222222e-05, "loss": 0.8191, "step": 9714 }, { "epoch": 77.72, "grad_norm": 25.78905487060547, "learning_rate": 1.2417777777777779e-05, "loss": 0.5966, "step": 9715 }, { "epoch": 77.728, "grad_norm": 21.16912841796875, "learning_rate": 1.2413333333333334e-05, "loss": 0.7006, "step": 9716 }, { "epoch": 77.736, "grad_norm": 25.22292137145996, "learning_rate": 1.240888888888889e-05, "loss": 1.5817, "step": 9717 }, { "epoch": 77.744, "grad_norm": 18.03985023498535, "learning_rate": 1.2404444444444445e-05, "loss": 1.9443, "step": 9718 }, { "epoch": 77.752, "grad_norm": 25.165658950805664, "learning_rate": 1.24e-05, "loss": 0.5797, "step": 9719 }, { "epoch": 77.76, "grad_norm": 24.01133918762207, "learning_rate": 1.2395555555555556e-05, "loss": 0.8015, "step": 9720 }, { "epoch": 77.768, "grad_norm": 21.31603240966797, "learning_rate": 1.2391111111111111e-05, 
"loss": 0.6514, "step": 9721 }, { "epoch": 77.776, "grad_norm": 14.016029357910156, "learning_rate": 1.2386666666666668e-05, "loss": 0.614, "step": 9722 }, { "epoch": 77.784, "grad_norm": 16.73328971862793, "learning_rate": 1.2382222222222223e-05, "loss": 0.9953, "step": 9723 }, { "epoch": 77.792, "grad_norm": 17.87360954284668, "learning_rate": 1.237777777777778e-05, "loss": 0.7923, "step": 9724 }, { "epoch": 77.8, "grad_norm": 13.136285781860352, "learning_rate": 1.2373333333333334e-05, "loss": 0.7502, "step": 9725 }, { "epoch": 77.808, "grad_norm": 34.84947967529297, "learning_rate": 1.2368888888888889e-05, "loss": 1.0651, "step": 9726 }, { "epoch": 77.816, "grad_norm": 20.841890335083008, "learning_rate": 1.2364444444444445e-05, "loss": 1.089, "step": 9727 }, { "epoch": 77.824, "grad_norm": 22.86534881591797, "learning_rate": 1.236e-05, "loss": 0.5472, "step": 9728 }, { "epoch": 77.832, "grad_norm": 21.846954345703125, "learning_rate": 1.2355555555555557e-05, "loss": 0.8028, "step": 9729 }, { "epoch": 77.84, "grad_norm": 83.31507110595703, "learning_rate": 1.2351111111111112e-05, "loss": 0.7364, "step": 9730 }, { "epoch": 77.848, "grad_norm": 41.92250442504883, "learning_rate": 1.2346666666666668e-05, "loss": 0.6544, "step": 9731 }, { "epoch": 77.856, "grad_norm": 32.41651916503906, "learning_rate": 1.2342222222222223e-05, "loss": 1.6717, "step": 9732 }, { "epoch": 77.864, "grad_norm": 27.104915618896484, "learning_rate": 1.2337777777777778e-05, "loss": 0.7303, "step": 9733 }, { "epoch": 77.872, "grad_norm": 25.8261661529541, "learning_rate": 1.2333333333333334e-05, "loss": 0.9726, "step": 9734 }, { "epoch": 77.88, "grad_norm": 191.29751586914062, "learning_rate": 1.232888888888889e-05, "loss": 0.7913, "step": 9735 }, { "epoch": 77.888, "grad_norm": 20.58964729309082, "learning_rate": 1.2324444444444446e-05, "loss": 1.0418, "step": 9736 }, { "epoch": 77.896, "grad_norm": 45.436336517333984, "learning_rate": 1.232e-05, "loss": 0.9543, "step": 9737 }, { "epoch": 
77.904, "grad_norm": 20.619861602783203, "learning_rate": 1.2315555555555557e-05, "loss": 0.85, "step": 9738 }, { "epoch": 77.912, "grad_norm": 219.85052490234375, "learning_rate": 1.2311111111111112e-05, "loss": 1.142, "step": 9739 }, { "epoch": 77.92, "grad_norm": 19.80376625061035, "learning_rate": 1.2306666666666669e-05, "loss": 0.9466, "step": 9740 }, { "epoch": 77.928, "grad_norm": 61.38962173461914, "learning_rate": 1.2302222222222223e-05, "loss": 0.8745, "step": 9741 }, { "epoch": 77.936, "grad_norm": 37.38256072998047, "learning_rate": 1.2297777777777778e-05, "loss": 1.0424, "step": 9742 }, { "epoch": 77.944, "grad_norm": 23.669248580932617, "learning_rate": 1.2293333333333335e-05, "loss": 0.6605, "step": 9743 }, { "epoch": 77.952, "grad_norm": 15.62473201751709, "learning_rate": 1.228888888888889e-05, "loss": 0.7117, "step": 9744 }, { "epoch": 77.96, "grad_norm": 27.50143051147461, "learning_rate": 1.2284444444444446e-05, "loss": 0.8063, "step": 9745 }, { "epoch": 77.968, "grad_norm": 43.40819549560547, "learning_rate": 1.2280000000000001e-05, "loss": 1.2431, "step": 9746 }, { "epoch": 77.976, "grad_norm": 12.060361862182617, "learning_rate": 1.2275555555555558e-05, "loss": 0.9486, "step": 9747 }, { "epoch": 77.984, "grad_norm": 48.124534606933594, "learning_rate": 1.2271111111111112e-05, "loss": 0.722, "step": 9748 }, { "epoch": 77.992, "grad_norm": 28.088871002197266, "learning_rate": 1.2266666666666667e-05, "loss": 0.8364, "step": 9749 }, { "epoch": 78.0, "grad_norm": 22.857166290283203, "learning_rate": 1.2262222222222222e-05, "loss": 0.9455, "step": 9750 }, { "epoch": 78.0, "eval_loss": 0.9537161588668823, "eval_map": 0.4748, "eval_map_50": 0.8219, "eval_map_75": 0.4941, "eval_map_Coverall": 0.6975, "eval_map_Face_Shield": 0.5629, "eval_map_Gloves": 0.374, "eval_map_Goggles": 0.2823, "eval_map_Mask": 0.4572, "eval_map_large": 0.675, "eval_map_medium": 0.3284, "eval_map_small": 0.3625, "eval_mar_1": 0.3387, "eval_mar_10": 0.5917, "eval_mar_100": 
0.5965, "eval_mar_100_Coverall": 0.7622, "eval_mar_100_Face_Shield": 0.7118, "eval_mar_100_Gloves": 0.4738, "eval_mar_100_Goggles": 0.4906, "eval_mar_100_Mask": 0.5442, "eval_mar_large": 0.7729, "eval_mar_medium": 0.4613, "eval_mar_small": 0.4182, "eval_runtime": 0.937, "eval_samples_per_second": 30.95, "eval_steps_per_second": 2.134, "step": 9750 }, { "epoch": 78.008, "grad_norm": 21.81589698791504, "learning_rate": 1.2257777777777777e-05, "loss": 1.0128, "step": 9751 }, { "epoch": 78.016, "grad_norm": 24.545133590698242, "learning_rate": 1.2253333333333333e-05, "loss": 0.975, "step": 9752 }, { "epoch": 78.024, "grad_norm": 15.419025421142578, "learning_rate": 1.2248888888888888e-05, "loss": 0.9701, "step": 9753 }, { "epoch": 78.032, "grad_norm": 34.86791229248047, "learning_rate": 1.2244444444444445e-05, "loss": 0.9337, "step": 9754 }, { "epoch": 78.04, "grad_norm": 33.1217155456543, "learning_rate": 1.224e-05, "loss": 0.9284, "step": 9755 }, { "epoch": 78.048, "grad_norm": 28.324359893798828, "learning_rate": 1.2235555555555556e-05, "loss": 0.8777, "step": 9756 }, { "epoch": 78.056, "grad_norm": 29.996166229248047, "learning_rate": 1.2231111111111111e-05, "loss": 0.9259, "step": 9757 }, { "epoch": 78.064, "grad_norm": 21.90251350402832, "learning_rate": 1.2226666666666668e-05, "loss": 0.8241, "step": 9758 }, { "epoch": 78.072, "grad_norm": 17.458478927612305, "learning_rate": 1.2222222222222222e-05, "loss": 0.9902, "step": 9759 }, { "epoch": 78.08, "grad_norm": 27.493797302246094, "learning_rate": 1.2217777777777777e-05, "loss": 0.7747, "step": 9760 }, { "epoch": 78.088, "grad_norm": 56.23725509643555, "learning_rate": 1.2213333333333334e-05, "loss": 2.4781, "step": 9761 }, { "epoch": 78.096, "grad_norm": 23.752891540527344, "learning_rate": 1.2208888888888889e-05, "loss": 1.4453, "step": 9762 }, { "epoch": 78.104, "grad_norm": 15.022697448730469, "learning_rate": 1.2204444444444445e-05, "loss": 0.7191, "step": 9763 }, { "epoch": 78.112, "grad_norm": 
21.987722396850586, "learning_rate": 1.22e-05, "loss": 0.7833, "step": 9764 }, { "epoch": 78.12, "grad_norm": 24.478113174438477, "learning_rate": 1.2195555555555557e-05, "loss": 0.5281, "step": 9765 }, { "epoch": 78.128, "grad_norm": 30.39900016784668, "learning_rate": 1.2191111111111111e-05, "loss": 0.9539, "step": 9766 }, { "epoch": 78.136, "grad_norm": 13.573418617248535, "learning_rate": 1.2186666666666666e-05, "loss": 0.5657, "step": 9767 }, { "epoch": 78.144, "grad_norm": 62.53437805175781, "learning_rate": 1.2182222222222223e-05, "loss": 1.0502, "step": 9768 }, { "epoch": 78.152, "grad_norm": 18.852453231811523, "learning_rate": 1.2177777777777778e-05, "loss": 0.7293, "step": 9769 }, { "epoch": 78.16, "grad_norm": 46.963768005371094, "learning_rate": 1.2173333333333334e-05, "loss": 0.7577, "step": 9770 }, { "epoch": 78.168, "grad_norm": 18.7686710357666, "learning_rate": 1.2168888888888889e-05, "loss": 0.7882, "step": 9771 }, { "epoch": 78.176, "grad_norm": 15.993254661560059, "learning_rate": 1.2164444444444446e-05, "loss": 0.7806, "step": 9772 }, { "epoch": 78.184, "grad_norm": 18.646512985229492, "learning_rate": 1.216e-05, "loss": 1.1256, "step": 9773 }, { "epoch": 78.192, "grad_norm": 23.51450538635254, "learning_rate": 1.2155555555555555e-05, "loss": 0.7896, "step": 9774 }, { "epoch": 78.2, "grad_norm": 13.022320747375488, "learning_rate": 1.2151111111111112e-05, "loss": 0.7794, "step": 9775 }, { "epoch": 78.208, "grad_norm": 29.335233688354492, "learning_rate": 1.2146666666666667e-05, "loss": 0.9796, "step": 9776 }, { "epoch": 78.216, "grad_norm": 43.797149658203125, "learning_rate": 1.2142222222222223e-05, "loss": 0.893, "step": 9777 }, { "epoch": 78.224, "grad_norm": 20.125625610351562, "learning_rate": 1.2137777777777778e-05, "loss": 0.7015, "step": 9778 }, { "epoch": 78.232, "grad_norm": 18.73279571533203, "learning_rate": 1.2133333333333335e-05, "loss": 0.7301, "step": 9779 }, { "epoch": 78.24, "grad_norm": 36.24449157714844, "learning_rate": 
1.212888888888889e-05, "loss": 0.829, "step": 9780 }, { "epoch": 78.248, "grad_norm": 25.59556770324707, "learning_rate": 1.2124444444444446e-05, "loss": 0.9035, "step": 9781 }, { "epoch": 78.256, "grad_norm": 14.17172908782959, "learning_rate": 1.2120000000000001e-05, "loss": 0.9919, "step": 9782 }, { "epoch": 78.264, "grad_norm": 25.938663482666016, "learning_rate": 1.2115555555555556e-05, "loss": 0.9231, "step": 9783 }, { "epoch": 78.272, "grad_norm": 33.19386291503906, "learning_rate": 1.2111111111111112e-05, "loss": 0.9736, "step": 9784 }, { "epoch": 78.28, "grad_norm": 16.166767120361328, "learning_rate": 1.2106666666666667e-05, "loss": 0.9417, "step": 9785 }, { "epoch": 78.288, "grad_norm": 15.46986198425293, "learning_rate": 1.2102222222222224e-05, "loss": 0.6157, "step": 9786 }, { "epoch": 78.296, "grad_norm": 21.81898307800293, "learning_rate": 1.2097777777777778e-05, "loss": 0.6629, "step": 9787 }, { "epoch": 78.304, "grad_norm": 21.034460067749023, "learning_rate": 1.2093333333333335e-05, "loss": 0.9017, "step": 9788 }, { "epoch": 78.312, "grad_norm": 18.822385787963867, "learning_rate": 1.208888888888889e-05, "loss": 0.9224, "step": 9789 }, { "epoch": 78.32, "grad_norm": 27.04270362854004, "learning_rate": 1.2084444444444445e-05, "loss": 0.6852, "step": 9790 }, { "epoch": 78.328, "grad_norm": 44.31669998168945, "learning_rate": 1.2080000000000001e-05, "loss": 0.7846, "step": 9791 }, { "epoch": 78.336, "grad_norm": 29.18247413635254, "learning_rate": 1.2075555555555556e-05, "loss": 1.612, "step": 9792 }, { "epoch": 78.344, "grad_norm": 47.89002990722656, "learning_rate": 1.2071111111111113e-05, "loss": 0.7492, "step": 9793 }, { "epoch": 78.352, "grad_norm": 48.88835906982422, "learning_rate": 1.2066666666666667e-05, "loss": 0.6578, "step": 9794 }, { "epoch": 78.36, "grad_norm": 15.614007949829102, "learning_rate": 1.2062222222222224e-05, "loss": 0.9811, "step": 9795 }, { "epoch": 78.368, "grad_norm": 41.03634262084961, "learning_rate": 
1.2057777777777779e-05, "loss": 0.8507, "step": 9796 }, { "epoch": 78.376, "grad_norm": 21.63961410522461, "learning_rate": 1.2053333333333334e-05, "loss": 0.6807, "step": 9797 }, { "epoch": 78.384, "grad_norm": 25.290775299072266, "learning_rate": 1.204888888888889e-05, "loss": 1.7712, "step": 9798 }, { "epoch": 78.392, "grad_norm": 231.9481201171875, "learning_rate": 1.2044444444444445e-05, "loss": 1.659, "step": 9799 }, { "epoch": 78.4, "grad_norm": 17.904605865478516, "learning_rate": 1.204e-05, "loss": 0.6936, "step": 9800 }, { "epoch": 78.408, "grad_norm": 16.92985725402832, "learning_rate": 1.2035555555555555e-05, "loss": 0.9027, "step": 9801 }, { "epoch": 78.416, "grad_norm": 183.24554443359375, "learning_rate": 1.2031111111111111e-05, "loss": 1.7219, "step": 9802 }, { "epoch": 78.424, "grad_norm": 28.01473617553711, "learning_rate": 1.2026666666666666e-05, "loss": 0.7928, "step": 9803 }, { "epoch": 78.432, "grad_norm": 18.365781784057617, "learning_rate": 1.2022222222222223e-05, "loss": 0.6571, "step": 9804 }, { "epoch": 78.44, "grad_norm": 23.381284713745117, "learning_rate": 1.2017777777777778e-05, "loss": 0.66, "step": 9805 }, { "epoch": 78.448, "grad_norm": 19.30318260192871, "learning_rate": 1.2013333333333334e-05, "loss": 1.2328, "step": 9806 }, { "epoch": 78.456, "grad_norm": 24.693361282348633, "learning_rate": 1.2008888888888889e-05, "loss": 0.6651, "step": 9807 }, { "epoch": 78.464, "grad_norm": 56.86522674560547, "learning_rate": 1.2004444444444444e-05, "loss": 1.194, "step": 9808 }, { "epoch": 78.472, "grad_norm": 32.12490463256836, "learning_rate": 1.2e-05, "loss": 0.757, "step": 9809 }, { "epoch": 78.48, "grad_norm": 36.888240814208984, "learning_rate": 1.1995555555555555e-05, "loss": 1.3556, "step": 9810 }, { "epoch": 78.488, "grad_norm": 35.358009338378906, "learning_rate": 1.1991111111111112e-05, "loss": 0.9906, "step": 9811 }, { "epoch": 78.496, "grad_norm": 30.12590980529785, "learning_rate": 1.1986666666666667e-05, "loss": 0.6955, 
"step": 9812 }, { "epoch": 78.504, "grad_norm": 13.500432014465332, "learning_rate": 1.1982222222222223e-05, "loss": 0.5436, "step": 9813 }, { "epoch": 78.512, "grad_norm": 14.755857467651367, "learning_rate": 1.1977777777777778e-05, "loss": 0.8083, "step": 9814 }, { "epoch": 78.52, "grad_norm": 48.0787353515625, "learning_rate": 1.1973333333333334e-05, "loss": 0.633, "step": 9815 }, { "epoch": 78.528, "grad_norm": 47.413169860839844, "learning_rate": 1.196888888888889e-05, "loss": 0.7621, "step": 9816 }, { "epoch": 78.536, "grad_norm": 18.6729679107666, "learning_rate": 1.1964444444444444e-05, "loss": 0.7211, "step": 9817 }, { "epoch": 78.544, "grad_norm": 23.565109252929688, "learning_rate": 1.196e-05, "loss": 0.6939, "step": 9818 }, { "epoch": 78.552, "grad_norm": 65.44636535644531, "learning_rate": 1.1955555555555556e-05, "loss": 0.9133, "step": 9819 }, { "epoch": 78.56, "grad_norm": 12.819721221923828, "learning_rate": 1.1951111111111112e-05, "loss": 0.7893, "step": 9820 }, { "epoch": 78.568, "grad_norm": 24.315580368041992, "learning_rate": 1.1946666666666667e-05, "loss": 0.8616, "step": 9821 }, { "epoch": 78.576, "grad_norm": 56.114967346191406, "learning_rate": 1.1942222222222223e-05, "loss": 0.6784, "step": 9822 }, { "epoch": 78.584, "grad_norm": 30.8526611328125, "learning_rate": 1.1937777777777778e-05, "loss": 0.4651, "step": 9823 }, { "epoch": 78.592, "grad_norm": 13.47140121459961, "learning_rate": 1.1933333333333333e-05, "loss": 0.66, "step": 9824 }, { "epoch": 78.6, "grad_norm": 34.2545051574707, "learning_rate": 1.192888888888889e-05, "loss": 0.922, "step": 9825 }, { "epoch": 78.608, "grad_norm": 21.06822967529297, "learning_rate": 1.1924444444444445e-05, "loss": 0.8098, "step": 9826 }, { "epoch": 78.616, "grad_norm": 22.19124984741211, "learning_rate": 1.1920000000000001e-05, "loss": 0.8302, "step": 9827 }, { "epoch": 78.624, "grad_norm": 20.508182525634766, "learning_rate": 1.1915555555555556e-05, "loss": 0.6364, "step": 9828 }, { "epoch": 78.632, 
"grad_norm": 42.17780685424805, "learning_rate": 1.1911111111111112e-05, "loss": 1.2623, "step": 9829 }, { "epoch": 78.64, "grad_norm": 30.05103302001953, "learning_rate": 1.1906666666666667e-05, "loss": 1.0648, "step": 9830 }, { "epoch": 78.648, "grad_norm": 15.78296947479248, "learning_rate": 1.1902222222222222e-05, "loss": 0.8038, "step": 9831 }, { "epoch": 78.656, "grad_norm": 70.66435241699219, "learning_rate": 1.1897777777777779e-05, "loss": 0.6699, "step": 9832 }, { "epoch": 78.664, "grad_norm": 19.97163963317871, "learning_rate": 1.1893333333333334e-05, "loss": 0.7739, "step": 9833 }, { "epoch": 78.672, "grad_norm": 15.561923027038574, "learning_rate": 1.188888888888889e-05, "loss": 0.8159, "step": 9834 }, { "epoch": 78.68, "grad_norm": 21.09187126159668, "learning_rate": 1.1884444444444445e-05, "loss": 1.0594, "step": 9835 }, { "epoch": 78.688, "grad_norm": 24.970932006835938, "learning_rate": 1.1880000000000001e-05, "loss": 0.6319, "step": 9836 }, { "epoch": 78.696, "grad_norm": 31.4395809173584, "learning_rate": 1.1875555555555556e-05, "loss": 0.8159, "step": 9837 }, { "epoch": 78.704, "grad_norm": 19.440311431884766, "learning_rate": 1.1871111111111113e-05, "loss": 0.8192, "step": 9838 }, { "epoch": 78.712, "grad_norm": 21.26857566833496, "learning_rate": 1.1866666666666668e-05, "loss": 0.9005, "step": 9839 }, { "epoch": 78.72, "grad_norm": 17.824649810791016, "learning_rate": 1.1862222222222223e-05, "loss": 1.0151, "step": 9840 }, { "epoch": 78.728, "grad_norm": 55.504798889160156, "learning_rate": 1.1857777777777779e-05, "loss": 0.6722, "step": 9841 }, { "epoch": 78.736, "grad_norm": 12.689275741577148, "learning_rate": 1.1853333333333334e-05, "loss": 0.7088, "step": 9842 }, { "epoch": 78.744, "grad_norm": 40.27817916870117, "learning_rate": 1.184888888888889e-05, "loss": 0.9887, "step": 9843 }, { "epoch": 78.752, "grad_norm": 23.97579574584961, "learning_rate": 1.1844444444444445e-05, "loss": 0.6927, "step": 9844 }, { "epoch": 78.76, "grad_norm": 
23.854755401611328, "learning_rate": 1.1840000000000002e-05, "loss": 0.7786, "step": 9845 }, { "epoch": 78.768, "grad_norm": 19.379684448242188, "learning_rate": 1.1835555555555557e-05, "loss": 0.8129, "step": 9846 }, { "epoch": 78.776, "grad_norm": 46.537845611572266, "learning_rate": 1.1831111111111112e-05, "loss": 0.92, "step": 9847 }, { "epoch": 78.784, "grad_norm": 24.002809524536133, "learning_rate": 1.1826666666666668e-05, "loss": 0.9151, "step": 9848 }, { "epoch": 78.792, "grad_norm": 26.678386688232422, "learning_rate": 1.1822222222222223e-05, "loss": 1.0522, "step": 9849 }, { "epoch": 78.8, "grad_norm": 13.810280799865723, "learning_rate": 1.181777777777778e-05, "loss": 1.0611, "step": 9850 }, { "epoch": 78.808, "grad_norm": 20.836597442626953, "learning_rate": 1.1813333333333334e-05, "loss": 0.6625, "step": 9851 }, { "epoch": 78.816, "grad_norm": 26.22123908996582, "learning_rate": 1.1808888888888889e-05, "loss": 0.8796, "step": 9852 }, { "epoch": 78.824, "grad_norm": 36.696685791015625, "learning_rate": 1.1804444444444444e-05, "loss": 0.9626, "step": 9853 }, { "epoch": 78.832, "grad_norm": 20.371337890625, "learning_rate": 1.18e-05, "loss": 0.5357, "step": 9854 }, { "epoch": 78.84, "grad_norm": 15.151050567626953, "learning_rate": 1.1795555555555555e-05, "loss": 0.9049, "step": 9855 }, { "epoch": 78.848, "grad_norm": 38.374839782714844, "learning_rate": 1.1791111111111112e-05, "loss": 0.843, "step": 9856 }, { "epoch": 78.856, "grad_norm": 23.394813537597656, "learning_rate": 1.1786666666666667e-05, "loss": 0.7865, "step": 9857 }, { "epoch": 78.864, "grad_norm": 14.044081687927246, "learning_rate": 1.1782222222222222e-05, "loss": 0.8253, "step": 9858 }, { "epoch": 78.872, "grad_norm": 16.891704559326172, "learning_rate": 1.1777777777777778e-05, "loss": 0.7391, "step": 9859 }, { "epoch": 78.88, "grad_norm": 23.27211570739746, "learning_rate": 1.1773333333333333e-05, "loss": 0.7745, "step": 9860 }, { "epoch": 78.888, "grad_norm": 35.519412994384766, 
"learning_rate": 1.176888888888889e-05, "loss": 1.2213, "step": 9861 }, { "epoch": 78.896, "grad_norm": 21.010944366455078, "learning_rate": 1.1764444444444444e-05, "loss": 0.9302, "step": 9862 }, { "epoch": 78.904, "grad_norm": 20.538217544555664, "learning_rate": 1.1760000000000001e-05, "loss": 1.2441, "step": 9863 }, { "epoch": 78.912, "grad_norm": 25.755577087402344, "learning_rate": 1.1755555555555556e-05, "loss": 0.6194, "step": 9864 }, { "epoch": 78.92, "grad_norm": 19.508270263671875, "learning_rate": 1.175111111111111e-05, "loss": 0.8476, "step": 9865 }, { "epoch": 78.928, "grad_norm": 33.1707763671875, "learning_rate": 1.1746666666666667e-05, "loss": 0.6501, "step": 9866 }, { "epoch": 78.936, "grad_norm": 30.928024291992188, "learning_rate": 1.1742222222222222e-05, "loss": 0.8195, "step": 9867 }, { "epoch": 78.944, "grad_norm": 22.6987361907959, "learning_rate": 1.1737777777777779e-05, "loss": 1.4805, "step": 9868 }, { "epoch": 78.952, "grad_norm": 11.52691650390625, "learning_rate": 1.1733333333333333e-05, "loss": 0.6784, "step": 9869 }, { "epoch": 78.96, "grad_norm": 106.9863052368164, "learning_rate": 1.172888888888889e-05, "loss": 0.6099, "step": 9870 }, { "epoch": 78.968, "grad_norm": 73.93305969238281, "learning_rate": 1.1724444444444445e-05, "loss": 0.866, "step": 9871 }, { "epoch": 78.976, "grad_norm": 12.799796104431152, "learning_rate": 1.172e-05, "loss": 0.9607, "step": 9872 }, { "epoch": 78.984, "grad_norm": 26.105735778808594, "learning_rate": 1.1715555555555556e-05, "loss": 0.8055, "step": 9873 }, { "epoch": 78.992, "grad_norm": 20.217845916748047, "learning_rate": 1.1711111111111111e-05, "loss": 0.9738, "step": 9874 }, { "epoch": 79.0, "grad_norm": 42.420284271240234, "learning_rate": 1.1706666666666668e-05, "loss": 0.8835, "step": 9875 }, { "epoch": 79.0, "eval_loss": 0.9556244611740112, "eval_map": 0.448, "eval_map_50": 0.8026, "eval_map_75": 0.4156, "eval_map_Coverall": 0.6897, "eval_map_Face_Shield": 0.4912, "eval_map_Gloves": 0.3485, 
"eval_map_Goggles": 0.2974, "eval_map_Mask": 0.4132, "eval_map_large": 0.6472, "eval_map_medium": 0.3015, "eval_map_small": 0.3613, "eval_mar_1": 0.3481, "eval_mar_10": 0.5761, "eval_mar_100": 0.5858, "eval_mar_100_Coverall": 0.7533, "eval_mar_100_Face_Shield": 0.6882, "eval_mar_100_Gloves": 0.4754, "eval_mar_100_Goggles": 0.5063, "eval_mar_100_Mask": 0.5058, "eval_mar_large": 0.7915, "eval_mar_medium": 0.4411, "eval_mar_small": 0.4378, "eval_runtime": 0.9257, "eval_samples_per_second": 31.328, "eval_steps_per_second": 2.161, "step": 9875 }, { "epoch": 79.008, "grad_norm": 40.13557815551758, "learning_rate": 1.1702222222222222e-05, "loss": 1.077, "step": 9876 }, { "epoch": 79.016, "grad_norm": 18.50119400024414, "learning_rate": 1.1697777777777779e-05, "loss": 0.8559, "step": 9877 }, { "epoch": 79.024, "grad_norm": 26.45491600036621, "learning_rate": 1.1693333333333334e-05, "loss": 0.837, "step": 9878 }, { "epoch": 79.032, "grad_norm": 19.606542587280273, "learning_rate": 1.168888888888889e-05, "loss": 0.6068, "step": 9879 }, { "epoch": 79.04, "grad_norm": 38.601139068603516, "learning_rate": 1.1684444444444445e-05, "loss": 0.6946, "step": 9880 }, { "epoch": 79.048, "grad_norm": 62.92501449584961, "learning_rate": 1.168e-05, "loss": 0.815, "step": 9881 }, { "epoch": 79.056, "grad_norm": 14.989991188049316, "learning_rate": 1.1675555555555557e-05, "loss": 0.8509, "step": 9882 }, { "epoch": 79.064, "grad_norm": 31.16447639465332, "learning_rate": 1.1671111111111111e-05, "loss": 0.7726, "step": 9883 }, { "epoch": 79.072, "grad_norm": 31.27833366394043, "learning_rate": 1.1666666666666668e-05, "loss": 1.0252, "step": 9884 }, { "epoch": 79.08, "grad_norm": 17.85810661315918, "learning_rate": 1.1662222222222223e-05, "loss": 0.6263, "step": 9885 }, { "epoch": 79.088, "grad_norm": 28.90587615966797, "learning_rate": 1.165777777777778e-05, "loss": 0.9064, "step": 9886 }, { "epoch": 79.096, "grad_norm": 29.81203842163086, "learning_rate": 1.1653333333333334e-05, "loss": 
0.9893, "step": 9887 }, { "epoch": 79.104, "grad_norm": 47.6085319519043, "learning_rate": 1.1648888888888889e-05, "loss": 1.0236, "step": 9888 }, { "epoch": 79.112, "grad_norm": 22.66244888305664, "learning_rate": 1.1644444444444446e-05, "loss": 0.5849, "step": 9889 }, { "epoch": 79.12, "grad_norm": 24.279844284057617, "learning_rate": 1.164e-05, "loss": 0.628, "step": 9890 }, { "epoch": 79.128, "grad_norm": 49.66902160644531, "learning_rate": 1.1635555555555557e-05, "loss": 1.1192, "step": 9891 }, { "epoch": 79.136, "grad_norm": 29.582250595092773, "learning_rate": 1.1631111111111112e-05, "loss": 0.7018, "step": 9892 }, { "epoch": 79.144, "grad_norm": 36.3056640625, "learning_rate": 1.1626666666666668e-05, "loss": 0.7684, "step": 9893 }, { "epoch": 79.152, "grad_norm": 37.97306442260742, "learning_rate": 1.1622222222222223e-05, "loss": 0.9147, "step": 9894 }, { "epoch": 79.16, "grad_norm": 33.1142692565918, "learning_rate": 1.1617777777777778e-05, "loss": 0.9082, "step": 9895 }, { "epoch": 79.168, "grad_norm": 16.285030364990234, "learning_rate": 1.1613333333333335e-05, "loss": 0.7627, "step": 9896 }, { "epoch": 79.176, "grad_norm": 74.27942657470703, "learning_rate": 1.160888888888889e-05, "loss": 1.8906, "step": 9897 }, { "epoch": 79.184, "grad_norm": 21.607513427734375, "learning_rate": 1.1604444444444446e-05, "loss": 0.7636, "step": 9898 }, { "epoch": 79.192, "grad_norm": 40.08641052246094, "learning_rate": 1.16e-05, "loss": 0.847, "step": 9899 }, { "epoch": 79.2, "grad_norm": 89.45165252685547, "learning_rate": 1.1595555555555557e-05, "loss": 0.7802, "step": 9900 }, { "epoch": 79.208, "grad_norm": 35.25638961791992, "learning_rate": 1.1591111111111112e-05, "loss": 0.8131, "step": 9901 }, { "epoch": 79.216, "grad_norm": 31.389326095581055, "learning_rate": 1.1586666666666669e-05, "loss": 1.2561, "step": 9902 }, { "epoch": 79.224, "grad_norm": 23.974288940429688, "learning_rate": 1.1582222222222222e-05, "loss": 0.8205, "step": 9903 }, { "epoch": 79.232, 
"grad_norm": 11.176375389099121, "learning_rate": 1.1577777777777778e-05, "loss": 0.9418, "step": 9904 }, { "epoch": 79.24, "grad_norm": 16.23397445678711, "learning_rate": 1.1573333333333333e-05, "loss": 0.6811, "step": 9905 }, { "epoch": 79.248, "grad_norm": 30.678747177124023, "learning_rate": 1.1568888888888888e-05, "loss": 1.1399, "step": 9906 }, { "epoch": 79.256, "grad_norm": 16.57652473449707, "learning_rate": 1.1564444444444445e-05, "loss": 0.6914, "step": 9907 }, { "epoch": 79.264, "grad_norm": 31.297536849975586, "learning_rate": 1.156e-05, "loss": 0.9645, "step": 9908 }, { "epoch": 79.272, "grad_norm": 12.626927375793457, "learning_rate": 1.1555555555555556e-05, "loss": 0.9124, "step": 9909 }, { "epoch": 79.28, "grad_norm": 28.07305908203125, "learning_rate": 1.155111111111111e-05, "loss": 0.4821, "step": 9910 }, { "epoch": 79.288, "grad_norm": 266.4908752441406, "learning_rate": 1.1546666666666667e-05, "loss": 1.2071, "step": 9911 }, { "epoch": 79.296, "grad_norm": 22.537473678588867, "learning_rate": 1.1542222222222222e-05, "loss": 1.6021, "step": 9912 }, { "epoch": 79.304, "grad_norm": 22.171573638916016, "learning_rate": 1.1537777777777779e-05, "loss": 0.8684, "step": 9913 }, { "epoch": 79.312, "grad_norm": 16.341127395629883, "learning_rate": 1.1533333333333334e-05, "loss": 0.9696, "step": 9914 }, { "epoch": 79.32, "grad_norm": 20.067073822021484, "learning_rate": 1.1528888888888888e-05, "loss": 0.8254, "step": 9915 }, { "epoch": 79.328, "grad_norm": 31.111188888549805, "learning_rate": 1.1524444444444445e-05, "loss": 0.8824, "step": 9916 }, { "epoch": 79.336, "grad_norm": 52.77494430541992, "learning_rate": 1.152e-05, "loss": 0.9797, "step": 9917 }, { "epoch": 79.344, "grad_norm": 22.389070510864258, "learning_rate": 1.1515555555555556e-05, "loss": 0.5769, "step": 9918 }, { "epoch": 79.352, "grad_norm": 20.138301849365234, "learning_rate": 1.1511111111111111e-05, "loss": 0.9905, "step": 9919 }, { "epoch": 79.36, "grad_norm": 25.547382354736328, 
"learning_rate": 1.1506666666666668e-05, "loss": 0.6692, "step": 9920 }, { "epoch": 79.368, "grad_norm": 14.946374893188477, "learning_rate": 1.1502222222222223e-05, "loss": 0.8785, "step": 9921 }, { "epoch": 79.376, "grad_norm": 23.864280700683594, "learning_rate": 1.1497777777777777e-05, "loss": 0.8845, "step": 9922 }, { "epoch": 79.384, "grad_norm": 40.10507583618164, "learning_rate": 1.1493333333333334e-05, "loss": 2.6907, "step": 9923 }, { "epoch": 79.392, "grad_norm": 19.28441047668457, "learning_rate": 1.1488888888888889e-05, "loss": 0.8153, "step": 9924 }, { "epoch": 79.4, "grad_norm": 11.667598724365234, "learning_rate": 1.1484444444444445e-05, "loss": 0.6637, "step": 9925 }, { "epoch": 79.408, "grad_norm": 17.75218963623047, "learning_rate": 1.148e-05, "loss": 1.2093, "step": 9926 }, { "epoch": 79.416, "grad_norm": 35.062530517578125, "learning_rate": 1.1475555555555557e-05, "loss": 0.7612, "step": 9927 }, { "epoch": 79.424, "grad_norm": 23.439619064331055, "learning_rate": 1.1471111111111112e-05, "loss": 0.7608, "step": 9928 }, { "epoch": 79.432, "grad_norm": 105.04265594482422, "learning_rate": 1.1466666666666666e-05, "loss": 1.0247, "step": 9929 }, { "epoch": 79.44, "grad_norm": 19.47076416015625, "learning_rate": 1.1462222222222223e-05, "loss": 0.6598, "step": 9930 }, { "epoch": 79.448, "grad_norm": 27.32171630859375, "learning_rate": 1.1457777777777778e-05, "loss": 0.89, "step": 9931 }, { "epoch": 79.456, "grad_norm": 14.748472213745117, "learning_rate": 1.1453333333333334e-05, "loss": 1.0506, "step": 9932 }, { "epoch": 79.464, "grad_norm": 20.415063858032227, "learning_rate": 1.144888888888889e-05, "loss": 0.8288, "step": 9933 }, { "epoch": 79.472, "grad_norm": 17.564647674560547, "learning_rate": 1.1444444444444446e-05, "loss": 0.5587, "step": 9934 }, { "epoch": 79.48, "grad_norm": 38.347049713134766, "learning_rate": 1.144e-05, "loss": 0.7274, "step": 9935 }, { "epoch": 79.488, "grad_norm": 16.648027420043945, "learning_rate": 
1.1435555555555557e-05, "loss": 0.6692, "step": 9936 }, { "epoch": 79.496, "grad_norm": 70.57943725585938, "learning_rate": 1.1431111111111112e-05, "loss": 0.6904, "step": 9937 }, { "epoch": 79.504, "grad_norm": 85.71636199951172, "learning_rate": 1.1426666666666667e-05, "loss": 0.7601, "step": 9938 }, { "epoch": 79.512, "grad_norm": 23.49964141845703, "learning_rate": 1.1422222222222223e-05, "loss": 0.849, "step": 9939 }, { "epoch": 79.52, "grad_norm": 23.42362403869629, "learning_rate": 1.1417777777777778e-05, "loss": 0.7804, "step": 9940 }, { "epoch": 79.528, "grad_norm": 19.042810440063477, "learning_rate": 1.1413333333333335e-05, "loss": 0.9893, "step": 9941 }, { "epoch": 79.536, "grad_norm": 22.823314666748047, "learning_rate": 1.140888888888889e-05, "loss": 0.6945, "step": 9942 }, { "epoch": 79.544, "grad_norm": 23.88494110107422, "learning_rate": 1.1404444444444446e-05, "loss": 0.9591, "step": 9943 }, { "epoch": 79.552, "grad_norm": 31.21543312072754, "learning_rate": 1.1400000000000001e-05, "loss": 0.9907, "step": 9944 }, { "epoch": 79.56, "grad_norm": 45.84779357910156, "learning_rate": 1.1395555555555556e-05, "loss": 0.946, "step": 9945 }, { "epoch": 79.568, "grad_norm": 21.639339447021484, "learning_rate": 1.1391111111111112e-05, "loss": 0.6608, "step": 9946 }, { "epoch": 79.576, "grad_norm": 27.544870376586914, "learning_rate": 1.1386666666666667e-05, "loss": 1.0, "step": 9947 }, { "epoch": 79.584, "grad_norm": 176.93380737304688, "learning_rate": 1.1382222222222224e-05, "loss": 0.888, "step": 9948 }, { "epoch": 79.592, "grad_norm": 29.93700408935547, "learning_rate": 1.1377777777777779e-05, "loss": 0.8969, "step": 9949 }, { "epoch": 79.6, "grad_norm": 22.42576789855957, "learning_rate": 1.1373333333333335e-05, "loss": 1.0562, "step": 9950 }, { "epoch": 79.608, "grad_norm": 68.8604507446289, "learning_rate": 1.136888888888889e-05, "loss": 1.0491, "step": 9951 }, { "epoch": 79.616, "grad_norm": 40.7264518737793, "learning_rate": 1.1364444444444445e-05, 
"loss": 1.1128, "step": 9952 }, { "epoch": 79.624, "grad_norm": 18.28996467590332, "learning_rate": 1.1360000000000001e-05, "loss": 0.667, "step": 9953 }, { "epoch": 79.632, "grad_norm": 40.021446228027344, "learning_rate": 1.1355555555555556e-05, "loss": 0.6073, "step": 9954 }, { "epoch": 79.64, "grad_norm": 32.5058708190918, "learning_rate": 1.1351111111111111e-05, "loss": 0.8837, "step": 9955 }, { "epoch": 79.648, "grad_norm": 19.856643676757812, "learning_rate": 1.1346666666666666e-05, "loss": 0.8395, "step": 9956 }, { "epoch": 79.656, "grad_norm": 23.2686767578125, "learning_rate": 1.1342222222222222e-05, "loss": 0.775, "step": 9957 }, { "epoch": 79.664, "grad_norm": 32.448341369628906, "learning_rate": 1.1337777777777777e-05, "loss": 1.1947, "step": 9958 }, { "epoch": 79.672, "grad_norm": 17.502593994140625, "learning_rate": 1.1333333333333334e-05, "loss": 0.6794, "step": 9959 }, { "epoch": 79.68, "grad_norm": 28.787620544433594, "learning_rate": 1.1328888888888889e-05, "loss": 1.1673, "step": 9960 }, { "epoch": 79.688, "grad_norm": 20.2471923828125, "learning_rate": 1.1324444444444445e-05, "loss": 0.64, "step": 9961 }, { "epoch": 79.696, "grad_norm": 13.437479972839355, "learning_rate": 1.132e-05, "loss": 0.754, "step": 9962 }, { "epoch": 79.704, "grad_norm": 16.821121215820312, "learning_rate": 1.1315555555555555e-05, "loss": 1.0517, "step": 9963 }, { "epoch": 79.712, "grad_norm": 12.751896858215332, "learning_rate": 1.1311111111111111e-05, "loss": 0.8377, "step": 9964 }, { "epoch": 79.72, "grad_norm": 17.398704528808594, "learning_rate": 1.1306666666666666e-05, "loss": 1.1237, "step": 9965 }, { "epoch": 79.728, "grad_norm": 33.23317337036133, "learning_rate": 1.1302222222222223e-05, "loss": 1.2897, "step": 9966 }, { "epoch": 79.736, "grad_norm": 36.803688049316406, "learning_rate": 1.1297777777777778e-05, "loss": 1.8097, "step": 9967 }, { "epoch": 79.744, "grad_norm": 17.629329681396484, "learning_rate": 1.1293333333333334e-05, "loss": 0.7917, "step": 9968 
}, { "epoch": 79.752, "grad_norm": 18.946033477783203, "learning_rate": 1.1288888888888889e-05, "loss": 0.7576, "step": 9969 }, { "epoch": 79.76, "grad_norm": 17.16134262084961, "learning_rate": 1.1284444444444444e-05, "loss": 0.7843, "step": 9970 }, { "epoch": 79.768, "grad_norm": 22.591106414794922, "learning_rate": 1.128e-05, "loss": 1.1324, "step": 9971 }, { "epoch": 79.776, "grad_norm": 26.104228973388672, "learning_rate": 1.1275555555555555e-05, "loss": 0.8348, "step": 9972 }, { "epoch": 79.784, "grad_norm": 37.14424514770508, "learning_rate": 1.1271111111111112e-05, "loss": 1.5183, "step": 9973 }, { "epoch": 79.792, "grad_norm": 24.351354598999023, "learning_rate": 1.1266666666666667e-05, "loss": 0.9327, "step": 9974 }, { "epoch": 79.8, "grad_norm": 27.906076431274414, "learning_rate": 1.1262222222222223e-05, "loss": 0.7078, "step": 9975 }, { "epoch": 79.808, "grad_norm": 25.51209831237793, "learning_rate": 1.1257777777777778e-05, "loss": 0.8213, "step": 9976 }, { "epoch": 79.816, "grad_norm": 37.95702362060547, "learning_rate": 1.1253333333333335e-05, "loss": 0.8467, "step": 9977 }, { "epoch": 79.824, "grad_norm": 29.9941463470459, "learning_rate": 1.124888888888889e-05, "loss": 0.7228, "step": 9978 }, { "epoch": 79.832, "grad_norm": 10.036238670349121, "learning_rate": 1.1244444444444444e-05, "loss": 0.5733, "step": 9979 }, { "epoch": 79.84, "grad_norm": 25.027315139770508, "learning_rate": 1.124e-05, "loss": 0.8953, "step": 9980 }, { "epoch": 79.848, "grad_norm": 17.860658645629883, "learning_rate": 1.1235555555555556e-05, "loss": 0.8278, "step": 9981 }, { "epoch": 79.856, "grad_norm": 22.10912322998047, "learning_rate": 1.1231111111111112e-05, "loss": 0.8942, "step": 9982 }, { "epoch": 79.864, "grad_norm": 15.350042343139648, "learning_rate": 1.1226666666666667e-05, "loss": 0.9678, "step": 9983 }, { "epoch": 79.872, "grad_norm": 21.98790740966797, "learning_rate": 1.1222222222222224e-05, "loss": 0.7854, "step": 9984 }, { "epoch": 79.88, "grad_norm": 
35.3503532409668, "learning_rate": 1.1217777777777778e-05, "loss": 0.9944, "step": 9985 }, { "epoch": 79.888, "grad_norm": 13.404831886291504, "learning_rate": 1.1213333333333333e-05, "loss": 0.9427, "step": 9986 }, { "epoch": 79.896, "grad_norm": 37.778255462646484, "learning_rate": 1.120888888888889e-05, "loss": 0.6087, "step": 9987 }, { "epoch": 79.904, "grad_norm": 24.118541717529297, "learning_rate": 1.1204444444444445e-05, "loss": 0.7593, "step": 9988 }, { "epoch": 79.912, "grad_norm": 34.33177947998047, "learning_rate": 1.1200000000000001e-05, "loss": 0.6474, "step": 9989 }, { "epoch": 79.92, "grad_norm": 32.089256286621094, "learning_rate": 1.1195555555555556e-05, "loss": 0.6901, "step": 9990 }, { "epoch": 79.928, "grad_norm": 40.540523529052734, "learning_rate": 1.1191111111111113e-05, "loss": 1.1233, "step": 9991 }, { "epoch": 79.936, "grad_norm": 45.69120788574219, "learning_rate": 1.1186666666666667e-05, "loss": 0.7261, "step": 9992 }, { "epoch": 79.944, "grad_norm": 31.141883850097656, "learning_rate": 1.1182222222222222e-05, "loss": 0.7528, "step": 9993 }, { "epoch": 79.952, "grad_norm": 28.41066551208496, "learning_rate": 1.1177777777777779e-05, "loss": 0.5453, "step": 9994 }, { "epoch": 79.96, "grad_norm": 304.03564453125, "learning_rate": 1.1173333333333334e-05, "loss": 1.7241, "step": 9995 }, { "epoch": 79.968, "grad_norm": 29.403724670410156, "learning_rate": 1.116888888888889e-05, "loss": 0.5578, "step": 9996 }, { "epoch": 79.976, "grad_norm": 35.20592498779297, "learning_rate": 1.1164444444444445e-05, "loss": 0.7635, "step": 9997 }, { "epoch": 79.984, "grad_norm": 19.24003028869629, "learning_rate": 1.1160000000000002e-05, "loss": 0.587, "step": 9998 }, { "epoch": 79.992, "grad_norm": 23.22224235534668, "learning_rate": 1.1155555555555556e-05, "loss": 0.6509, "step": 9999 }, { "epoch": 80.0, "grad_norm": 36.718936920166016, "learning_rate": 1.1151111111111113e-05, "loss": 0.9817, "step": 10000 }, { "epoch": 80.0, "eval_loss": 
0.9123949408531189, "eval_map": 0.4734, "eval_map_50": 0.8147, "eval_map_75": 0.4774, "eval_map_Coverall": 0.6984, "eval_map_Face_Shield": 0.6026, "eval_map_Gloves": 0.3649, "eval_map_Goggles": 0.2891, "eval_map_Mask": 0.4119, "eval_map_large": 0.7009, "eval_map_medium": 0.3301, "eval_map_small": 0.3578, "eval_mar_1": 0.349, "eval_mar_10": 0.582, "eval_mar_100": 0.5999, "eval_mar_100_Coverall": 0.7867, "eval_mar_100_Face_Shield": 0.7294, "eval_mar_100_Gloves": 0.4934, "eval_mar_100_Goggles": 0.4844, "eval_mar_100_Mask": 0.5058, "eval_mar_large": 0.7928, "eval_mar_medium": 0.4975, "eval_mar_small": 0.404, "eval_runtime": 0.9152, "eval_samples_per_second": 31.687, "eval_steps_per_second": 2.185, "step": 10000 }, { "epoch": 80.008, "grad_norm": 22.086750030517578, "learning_rate": 1.1146666666666668e-05, "loss": 0.6336, "step": 10001 }, { "epoch": 80.016, "grad_norm": 14.33431625366211, "learning_rate": 1.1142222222222223e-05, "loss": 0.8107, "step": 10002 }, { "epoch": 80.024, "grad_norm": 14.60013198852539, "learning_rate": 1.113777777777778e-05, "loss": 0.7968, "step": 10003 }, { "epoch": 80.032, "grad_norm": 29.87823486328125, "learning_rate": 1.1133333333333334e-05, "loss": 0.7855, "step": 10004 }, { "epoch": 80.04, "grad_norm": 26.33049964904785, "learning_rate": 1.112888888888889e-05, "loss": 0.8167, "step": 10005 }, { "epoch": 80.048, "grad_norm": 17.536911010742188, "learning_rate": 1.1124444444444445e-05, "loss": 0.6778, "step": 10006 }, { "epoch": 80.056, "grad_norm": 15.652186393737793, "learning_rate": 1.112e-05, "loss": 0.6124, "step": 10007 }, { "epoch": 80.064, "grad_norm": 21.205110549926758, "learning_rate": 1.1115555555555555e-05, "loss": 1.0894, "step": 10008 }, { "epoch": 80.072, "grad_norm": 28.48960304260254, "learning_rate": 1.1111111111111112e-05, "loss": 0.7774, "step": 10009 }, { "epoch": 80.08, "grad_norm": 20.29580307006836, "learning_rate": 1.1106666666666666e-05, "loss": 0.6189, "step": 10010 }, { "epoch": 80.088, "grad_norm": 
21.383522033691406, "learning_rate": 1.1102222222222223e-05, "loss": 0.6713, "step": 10011 }, { "epoch": 80.096, "grad_norm": 26.37599754333496, "learning_rate": 1.1097777777777778e-05, "loss": 0.5321, "step": 10012 }, { "epoch": 80.104, "grad_norm": 27.143461227416992, "learning_rate": 1.1093333333333333e-05, "loss": 0.6351, "step": 10013 }, { "epoch": 80.112, "grad_norm": 19.706371307373047, "learning_rate": 1.108888888888889e-05, "loss": 0.7109, "step": 10014 }, { "epoch": 80.12, "grad_norm": 27.534944534301758, "learning_rate": 1.1084444444444444e-05, "loss": 0.8706, "step": 10015 }, { "epoch": 80.128, "grad_norm": 25.631874084472656, "learning_rate": 1.108e-05, "loss": 0.6902, "step": 10016 }, { "epoch": 80.136, "grad_norm": 24.965700149536133, "learning_rate": 1.1075555555555555e-05, "loss": 0.716, "step": 10017 }, { "epoch": 80.144, "grad_norm": 20.370521545410156, "learning_rate": 1.1071111111111112e-05, "loss": 0.729, "step": 10018 }, { "epoch": 80.152, "grad_norm": 373.2560729980469, "learning_rate": 1.1066666666666667e-05, "loss": 1.0387, "step": 10019 }, { "epoch": 80.16, "grad_norm": 19.734783172607422, "learning_rate": 1.1062222222222222e-05, "loss": 0.6142, "step": 10020 }, { "epoch": 80.168, "grad_norm": 21.39973258972168, "learning_rate": 1.1057777777777778e-05, "loss": 0.6485, "step": 10021 }, { "epoch": 80.176, "grad_norm": 20.914081573486328, "learning_rate": 1.1053333333333333e-05, "loss": 0.8064, "step": 10022 }, { "epoch": 80.184, "grad_norm": 23.933364868164062, "learning_rate": 1.104888888888889e-05, "loss": 0.9251, "step": 10023 }, { "epoch": 80.192, "grad_norm": 18.800668716430664, "learning_rate": 1.1044444444444444e-05, "loss": 0.6956, "step": 10024 }, { "epoch": 80.2, "grad_norm": 24.180767059326172, "learning_rate": 1.1040000000000001e-05, "loss": 0.9136, "step": 10025 }, { "epoch": 80.208, "grad_norm": 35.882423400878906, "learning_rate": 1.1035555555555556e-05, "loss": 1.0911, "step": 10026 }, { "epoch": 80.216, "grad_norm": 
88.43877410888672, "learning_rate": 1.103111111111111e-05, "loss": 0.682, "step": 10027 }, { "epoch": 80.224, "grad_norm": 162.37203979492188, "learning_rate": 1.1026666666666667e-05, "loss": 1.0103, "step": 10028 }, { "epoch": 80.232, "grad_norm": 46.67645263671875, "learning_rate": 1.1022222222222222e-05, "loss": 0.7566, "step": 10029 }, { "epoch": 80.24, "grad_norm": 21.66681671142578, "learning_rate": 1.1017777777777779e-05, "loss": 0.7049, "step": 10030 }, { "epoch": 80.248, "grad_norm": 18.22466468811035, "learning_rate": 1.1013333333333333e-05, "loss": 0.8315, "step": 10031 }, { "epoch": 80.256, "grad_norm": 20.11448097229004, "learning_rate": 1.100888888888889e-05, "loss": 0.8014, "step": 10032 }, { "epoch": 80.264, "grad_norm": 19.752758026123047, "learning_rate": 1.1004444444444445e-05, "loss": 0.9398, "step": 10033 }, { "epoch": 80.272, "grad_norm": 26.939414978027344, "learning_rate": 1.1000000000000001e-05, "loss": 1.1932, "step": 10034 }, { "epoch": 80.28, "grad_norm": 24.005672454833984, "learning_rate": 1.0995555555555556e-05, "loss": 0.8581, "step": 10035 }, { "epoch": 80.288, "grad_norm": 16.109088897705078, "learning_rate": 1.0991111111111111e-05, "loss": 0.6229, "step": 10036 }, { "epoch": 80.296, "grad_norm": 24.49561309814453, "learning_rate": 1.0986666666666668e-05, "loss": 0.6257, "step": 10037 }, { "epoch": 80.304, "grad_norm": 19.125579833984375, "learning_rate": 1.0982222222222222e-05, "loss": 1.0089, "step": 10038 }, { "epoch": 80.312, "grad_norm": 20.40328025817871, "learning_rate": 1.0977777777777779e-05, "loss": 1.1679, "step": 10039 }, { "epoch": 80.32, "grad_norm": 19.829696655273438, "learning_rate": 1.0973333333333334e-05, "loss": 0.9167, "step": 10040 }, { "epoch": 80.328, "grad_norm": 146.10597229003906, "learning_rate": 1.096888888888889e-05, "loss": 1.4889, "step": 10041 }, { "epoch": 80.336, "grad_norm": 18.469268798828125, "learning_rate": 1.0964444444444445e-05, "loss": 0.6819, "step": 10042 }, { "epoch": 80.344, 
"grad_norm": 19.114870071411133, "learning_rate": 1.096e-05, "loss": 0.8325, "step": 10043 }, { "epoch": 80.352, "grad_norm": 28.007591247558594, "learning_rate": 1.0955555555555557e-05, "loss": 0.7082, "step": 10044 }, { "epoch": 80.36, "grad_norm": 24.098613739013672, "learning_rate": 1.0951111111111111e-05, "loss": 1.0941, "step": 10045 }, { "epoch": 80.368, "grad_norm": 40.61500930786133, "learning_rate": 1.0946666666666668e-05, "loss": 0.8028, "step": 10046 }, { "epoch": 80.376, "grad_norm": 15.193598747253418, "learning_rate": 1.0942222222222223e-05, "loss": 0.8819, "step": 10047 }, { "epoch": 80.384, "grad_norm": 33.70084762573242, "learning_rate": 1.093777777777778e-05, "loss": 0.5746, "step": 10048 }, { "epoch": 80.392, "grad_norm": 20.235761642456055, "learning_rate": 1.0933333333333334e-05, "loss": 1.067, "step": 10049 }, { "epoch": 80.4, "grad_norm": 42.242027282714844, "learning_rate": 1.0928888888888889e-05, "loss": 0.7126, "step": 10050 }, { "epoch": 80.408, "grad_norm": 24.939605712890625, "learning_rate": 1.0924444444444446e-05, "loss": 1.1249, "step": 10051 }, { "epoch": 80.416, "grad_norm": 38.45451354980469, "learning_rate": 1.092e-05, "loss": 0.7854, "step": 10052 }, { "epoch": 80.424, "grad_norm": 25.109968185424805, "learning_rate": 1.0915555555555557e-05, "loss": 0.7862, "step": 10053 }, { "epoch": 80.432, "grad_norm": 12.022293090820312, "learning_rate": 1.0911111111111112e-05, "loss": 0.83, "step": 10054 }, { "epoch": 80.44, "grad_norm": 31.444028854370117, "learning_rate": 1.0906666666666668e-05, "loss": 0.7858, "step": 10055 }, { "epoch": 80.448, "grad_norm": 20.61294937133789, "learning_rate": 1.0902222222222223e-05, "loss": 0.8444, "step": 10056 }, { "epoch": 80.456, "grad_norm": 45.05363082885742, "learning_rate": 1.089777777777778e-05, "loss": 0.8849, "step": 10057 }, { "epoch": 80.464, "grad_norm": 13.417984962463379, "learning_rate": 1.0893333333333333e-05, "loss": 0.8012, "step": 10058 }, { "epoch": 80.472, "grad_norm": 
24.731525421142578, "learning_rate": 1.088888888888889e-05, "loss": 1.7961, "step": 10059 }, { "epoch": 80.48, "grad_norm": 14.919549942016602, "learning_rate": 1.0884444444444444e-05, "loss": 0.6966, "step": 10060 }, { "epoch": 80.488, "grad_norm": 35.44174575805664, "learning_rate": 1.088e-05, "loss": 2.259, "step": 10061 }, { "epoch": 80.496, "grad_norm": 22.35218048095703, "learning_rate": 1.0875555555555556e-05, "loss": 0.936, "step": 10062 }, { "epoch": 80.504, "grad_norm": 27.017332077026367, "learning_rate": 1.087111111111111e-05, "loss": 0.8365, "step": 10063 }, { "epoch": 80.512, "grad_norm": 40.835880279541016, "learning_rate": 1.0866666666666667e-05, "loss": 0.7188, "step": 10064 }, { "epoch": 80.52, "grad_norm": 36.015655517578125, "learning_rate": 1.0862222222222222e-05, "loss": 0.9458, "step": 10065 }, { "epoch": 80.528, "grad_norm": 45.52705764770508, "learning_rate": 1.0857777777777778e-05, "loss": 0.8432, "step": 10066 }, { "epoch": 80.536, "grad_norm": 29.904190063476562, "learning_rate": 1.0853333333333333e-05, "loss": 0.643, "step": 10067 }, { "epoch": 80.544, "grad_norm": 30.38014030456543, "learning_rate": 1.0848888888888888e-05, "loss": 0.9815, "step": 10068 }, { "epoch": 80.552, "grad_norm": 23.603374481201172, "learning_rate": 1.0844444444444445e-05, "loss": 1.02, "step": 10069 }, { "epoch": 80.56, "grad_norm": 20.71649742126465, "learning_rate": 1.084e-05, "loss": 0.5813, "step": 10070 }, { "epoch": 80.568, "grad_norm": 17.071475982666016, "learning_rate": 1.0835555555555556e-05, "loss": 1.0953, "step": 10071 }, { "epoch": 80.576, "grad_norm": 27.537837982177734, "learning_rate": 1.0831111111111111e-05, "loss": 0.9055, "step": 10072 }, { "epoch": 80.584, "grad_norm": 34.66092300415039, "learning_rate": 1.0826666666666667e-05, "loss": 0.5161, "step": 10073 }, { "epoch": 80.592, "grad_norm": 13.96732234954834, "learning_rate": 1.0822222222222222e-05, "loss": 0.8577, "step": 10074 }, { "epoch": 80.6, "grad_norm": 19.53449821472168, 
"learning_rate": 1.0817777777777779e-05, "loss": 1.1102, "step": 10075 }, { "epoch": 80.608, "grad_norm": 22.617855072021484, "learning_rate": 1.0813333333333334e-05, "loss": 1.1228, "step": 10076 }, { "epoch": 80.616, "grad_norm": 93.2249526977539, "learning_rate": 1.0808888888888889e-05, "loss": 0.9413, "step": 10077 }, { "epoch": 80.624, "grad_norm": 17.168920516967773, "learning_rate": 1.0804444444444445e-05, "loss": 0.8575, "step": 10078 }, { "epoch": 80.632, "grad_norm": 29.929885864257812, "learning_rate": 1.08e-05, "loss": 0.8086, "step": 10079 }, { "epoch": 80.64, "grad_norm": 34.77775573730469, "learning_rate": 1.0795555555555556e-05, "loss": 1.4815, "step": 10080 }, { "epoch": 80.648, "grad_norm": 37.00553512573242, "learning_rate": 1.0791111111111111e-05, "loss": 0.8539, "step": 10081 }, { "epoch": 80.656, "grad_norm": 31.834461212158203, "learning_rate": 1.0786666666666668e-05, "loss": 0.8666, "step": 10082 }, { "epoch": 80.664, "grad_norm": 18.31608009338379, "learning_rate": 1.0782222222222223e-05, "loss": 0.7656, "step": 10083 }, { "epoch": 80.672, "grad_norm": 37.921443939208984, "learning_rate": 1.0777777777777778e-05, "loss": 0.9028, "step": 10084 }, { "epoch": 80.68, "grad_norm": 34.13949203491211, "learning_rate": 1.0773333333333334e-05, "loss": 0.9138, "step": 10085 }, { "epoch": 80.688, "grad_norm": 26.960365295410156, "learning_rate": 1.0768888888888889e-05, "loss": 0.7889, "step": 10086 }, { "epoch": 80.696, "grad_norm": 18.489017486572266, "learning_rate": 1.0764444444444445e-05, "loss": 0.7814, "step": 10087 }, { "epoch": 80.704, "grad_norm": 22.591867446899414, "learning_rate": 1.076e-05, "loss": 1.3697, "step": 10088 }, { "epoch": 80.712, "grad_norm": 23.60611915588379, "learning_rate": 1.0755555555555557e-05, "loss": 0.7854, "step": 10089 }, { "epoch": 80.72, "grad_norm": 17.37331199645996, "learning_rate": 1.0751111111111112e-05, "loss": 0.7859, "step": 10090 }, { "epoch": 80.728, "grad_norm": 21.549617767333984, "learning_rate": 
1.0746666666666667e-05, "loss": 0.8366, "step": 10091 }, { "epoch": 80.736, "grad_norm": 23.773048400878906, "learning_rate": 1.0742222222222223e-05, "loss": 0.6914, "step": 10092 }, { "epoch": 80.744, "grad_norm": 18.961681365966797, "learning_rate": 1.0737777777777778e-05, "loss": 1.0525, "step": 10093 }, { "epoch": 80.752, "grad_norm": 41.957332611083984, "learning_rate": 1.0733333333333334e-05, "loss": 0.7191, "step": 10094 }, { "epoch": 80.76, "grad_norm": 317.2366943359375, "learning_rate": 1.072888888888889e-05, "loss": 0.967, "step": 10095 }, { "epoch": 80.768, "grad_norm": 20.274091720581055, "learning_rate": 1.0724444444444446e-05, "loss": 0.6246, "step": 10096 }, { "epoch": 80.776, "grad_norm": 36.43967819213867, "learning_rate": 1.072e-05, "loss": 0.8774, "step": 10097 }, { "epoch": 80.784, "grad_norm": 29.47465705871582, "learning_rate": 1.0715555555555557e-05, "loss": 0.9593, "step": 10098 }, { "epoch": 80.792, "grad_norm": 49.143577575683594, "learning_rate": 1.0711111111111112e-05, "loss": 0.8441, "step": 10099 }, { "epoch": 80.8, "grad_norm": 16.159196853637695, "learning_rate": 1.0706666666666667e-05, "loss": 1.094, "step": 10100 }, { "epoch": 80.808, "grad_norm": 31.79847526550293, "learning_rate": 1.0702222222222223e-05, "loss": 1.8219, "step": 10101 }, { "epoch": 80.816, "grad_norm": 23.224163055419922, "learning_rate": 1.0697777777777778e-05, "loss": 0.6141, "step": 10102 }, { "epoch": 80.824, "grad_norm": 87.08271789550781, "learning_rate": 1.0693333333333335e-05, "loss": 2.5423, "step": 10103 }, { "epoch": 80.832, "grad_norm": 33.59432601928711, "learning_rate": 1.068888888888889e-05, "loss": 0.7986, "step": 10104 }, { "epoch": 80.84, "grad_norm": 42.3831672668457, "learning_rate": 1.0684444444444446e-05, "loss": 0.6285, "step": 10105 }, { "epoch": 80.848, "grad_norm": 29.976816177368164, "learning_rate": 1.0680000000000001e-05, "loss": 1.0126, "step": 10106 }, { "epoch": 80.856, "grad_norm": 15.54134750366211, "learning_rate": 
1.0675555555555556e-05, "loss": 1.0486, "step": 10107 }, { "epoch": 80.864, "grad_norm": 26.33356475830078, "learning_rate": 1.0671111111111112e-05, "loss": 0.8733, "step": 10108 }, { "epoch": 80.872, "grad_norm": 24.44985580444336, "learning_rate": 1.0666666666666667e-05, "loss": 0.7534, "step": 10109 }, { "epoch": 80.88, "grad_norm": 42.07669448852539, "learning_rate": 1.0662222222222222e-05, "loss": 1.8683, "step": 10110 }, { "epoch": 80.888, "grad_norm": 19.277875900268555, "learning_rate": 1.0657777777777777e-05, "loss": 0.8345, "step": 10111 }, { "epoch": 80.896, "grad_norm": 13.308664321899414, "learning_rate": 1.0653333333333334e-05, "loss": 0.9794, "step": 10112 }, { "epoch": 80.904, "grad_norm": 31.209346771240234, "learning_rate": 1.0648888888888888e-05, "loss": 0.6395, "step": 10113 }, { "epoch": 80.912, "grad_norm": 17.29077911376953, "learning_rate": 1.0644444444444445e-05, "loss": 0.9529, "step": 10114 }, { "epoch": 80.92, "grad_norm": 49.141258239746094, "learning_rate": 1.064e-05, "loss": 0.5469, "step": 10115 }, { "epoch": 80.928, "grad_norm": 26.751922607421875, "learning_rate": 1.0635555555555556e-05, "loss": 0.6838, "step": 10116 }, { "epoch": 80.936, "grad_norm": 41.473514556884766, "learning_rate": 1.0631111111111111e-05, "loss": 0.9354, "step": 10117 }, { "epoch": 80.944, "grad_norm": 15.962801933288574, "learning_rate": 1.0626666666666666e-05, "loss": 0.8692, "step": 10118 }, { "epoch": 80.952, "grad_norm": 13.191823959350586, "learning_rate": 1.0622222222222223e-05, "loss": 0.9083, "step": 10119 }, { "epoch": 80.96, "grad_norm": 32.413848876953125, "learning_rate": 1.0617777777777777e-05, "loss": 0.626, "step": 10120 }, { "epoch": 80.968, "grad_norm": 24.086523056030273, "learning_rate": 1.0613333333333334e-05, "loss": 0.641, "step": 10121 }, { "epoch": 80.976, "grad_norm": 21.4399356842041, "learning_rate": 1.0608888888888889e-05, "loss": 0.6599, "step": 10122 }, { "epoch": 80.984, "grad_norm": 19.044837951660156, "learning_rate": 
1.0604444444444445e-05, "loss": 0.6329, "step": 10123 }, { "epoch": 80.992, "grad_norm": 55.30649948120117, "learning_rate": 1.06e-05, "loss": 1.4157, "step": 10124 }, { "epoch": 81.0, "grad_norm": 15.544661521911621, "learning_rate": 1.0595555555555555e-05, "loss": 0.5882, "step": 10125 }, { "epoch": 81.0, "eval_loss": 0.929075300693512, "eval_map": 0.4712, "eval_map_50": 0.8077, "eval_map_75": 0.4521, "eval_map_Coverall": 0.6917, "eval_map_Face_Shield": 0.574, "eval_map_Gloves": 0.3609, "eval_map_Goggles": 0.2825, "eval_map_Mask": 0.4469, "eval_map_large": 0.6742, "eval_map_medium": 0.3195, "eval_map_small": 0.3614, "eval_mar_1": 0.3505, "eval_mar_10": 0.5939, "eval_mar_100": 0.6103, "eval_mar_100_Coverall": 0.7756, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.4951, "eval_mar_100_Goggles": 0.525, "eval_mar_100_Mask": 0.5385, "eval_mar_large": 0.8017, "eval_mar_medium": 0.4607, "eval_mar_small": 0.4286, "eval_runtime": 0.9213, "eval_samples_per_second": 31.478, "eval_steps_per_second": 2.171, "step": 10125 }, { "epoch": 81.008, "grad_norm": 13.38981819152832, "learning_rate": 1.0591111111111112e-05, "loss": 0.6312, "step": 10126 }, { "epoch": 81.016, "grad_norm": 20.69681167602539, "learning_rate": 1.0586666666666666e-05, "loss": 1.057, "step": 10127 }, { "epoch": 81.024, "grad_norm": 41.95026779174805, "learning_rate": 1.0582222222222223e-05, "loss": 0.7198, "step": 10128 }, { "epoch": 81.032, "grad_norm": 25.422021865844727, "learning_rate": 1.0577777777777778e-05, "loss": 0.4755, "step": 10129 }, { "epoch": 81.04, "grad_norm": 29.753799438476562, "learning_rate": 1.0573333333333334e-05, "loss": 0.9774, "step": 10130 }, { "epoch": 81.048, "grad_norm": 16.184415817260742, "learning_rate": 1.056888888888889e-05, "loss": 0.856, "step": 10131 }, { "epoch": 81.056, "grad_norm": 39.44356155395508, "learning_rate": 1.0564444444444446e-05, "loss": 0.7005, "step": 10132 }, { "epoch": 81.064, "grad_norm": 22.129953384399414, "learning_rate": 1.056e-05, 
"loss": 0.7853, "step": 10133 }, { "epoch": 81.072, "grad_norm": 52.55325698852539, "learning_rate": 1.0555555555555555e-05, "loss": 0.6941, "step": 10134 }, { "epoch": 81.08, "grad_norm": 30.432758331298828, "learning_rate": 1.0551111111111112e-05, "loss": 0.6623, "step": 10135 }, { "epoch": 81.088, "grad_norm": 32.184295654296875, "learning_rate": 1.0546666666666667e-05, "loss": 0.5834, "step": 10136 }, { "epoch": 81.096, "grad_norm": 41.95901107788086, "learning_rate": 1.0542222222222223e-05, "loss": 0.8363, "step": 10137 }, { "epoch": 81.104, "grad_norm": 16.079362869262695, "learning_rate": 1.0537777777777778e-05, "loss": 0.8258, "step": 10138 }, { "epoch": 81.112, "grad_norm": 50.70534896850586, "learning_rate": 1.0533333333333335e-05, "loss": 0.7053, "step": 10139 }, { "epoch": 81.12, "grad_norm": 25.593082427978516, "learning_rate": 1.052888888888889e-05, "loss": 0.4935, "step": 10140 }, { "epoch": 81.128, "grad_norm": 16.508922576904297, "learning_rate": 1.0524444444444444e-05, "loss": 0.8338, "step": 10141 }, { "epoch": 81.136, "grad_norm": 21.785558700561523, "learning_rate": 1.0520000000000001e-05, "loss": 0.7837, "step": 10142 }, { "epoch": 81.144, "grad_norm": 26.012372970581055, "learning_rate": 1.0515555555555556e-05, "loss": 0.9984, "step": 10143 }, { "epoch": 81.152, "grad_norm": 22.24598503112793, "learning_rate": 1.0511111111111112e-05, "loss": 0.5998, "step": 10144 }, { "epoch": 81.16, "grad_norm": 20.962177276611328, "learning_rate": 1.0506666666666667e-05, "loss": 0.6705, "step": 10145 }, { "epoch": 81.168, "grad_norm": 36.20521926879883, "learning_rate": 1.0502222222222224e-05, "loss": 0.8196, "step": 10146 }, { "epoch": 81.176, "grad_norm": 20.286346435546875, "learning_rate": 1.0497777777777779e-05, "loss": 0.8802, "step": 10147 }, { "epoch": 81.184, "grad_norm": 34.03537368774414, "learning_rate": 1.0493333333333333e-05, "loss": 0.7181, "step": 10148 }, { "epoch": 81.192, "grad_norm": 20.416475296020508, "learning_rate": 
1.048888888888889e-05, "loss": 1.0609, "step": 10149 }, { "epoch": 81.2, "grad_norm": 27.811403274536133, "learning_rate": 1.0484444444444445e-05, "loss": 0.6957, "step": 10150 }, { "epoch": 81.208, "grad_norm": 24.71422576904297, "learning_rate": 1.0480000000000001e-05, "loss": 1.1681, "step": 10151 }, { "epoch": 81.216, "grad_norm": 16.828279495239258, "learning_rate": 1.0475555555555556e-05, "loss": 0.6623, "step": 10152 }, { "epoch": 81.224, "grad_norm": 25.122478485107422, "learning_rate": 1.0471111111111113e-05, "loss": 0.7635, "step": 10153 }, { "epoch": 81.232, "grad_norm": 53.684303283691406, "learning_rate": 1.0466666666666668e-05, "loss": 0.7591, "step": 10154 }, { "epoch": 81.24, "grad_norm": 20.95306968688965, "learning_rate": 1.0462222222222224e-05, "loss": 1.1384, "step": 10155 }, { "epoch": 81.248, "grad_norm": 30.3349552154541, "learning_rate": 1.0457777777777779e-05, "loss": 0.8757, "step": 10156 }, { "epoch": 81.256, "grad_norm": 30.478132247924805, "learning_rate": 1.0453333333333334e-05, "loss": 1.0974, "step": 10157 }, { "epoch": 81.264, "grad_norm": 59.37400817871094, "learning_rate": 1.044888888888889e-05, "loss": 0.6867, "step": 10158 }, { "epoch": 81.272, "grad_norm": 34.15180587768555, "learning_rate": 1.0444444444444445e-05, "loss": 0.8347, "step": 10159 }, { "epoch": 81.28, "grad_norm": 36.938541412353516, "learning_rate": 1.0440000000000002e-05, "loss": 0.7442, "step": 10160 }, { "epoch": 81.288, "grad_norm": 18.495859146118164, "learning_rate": 1.0435555555555555e-05, "loss": 1.6457, "step": 10161 }, { "epoch": 81.296, "grad_norm": 38.26590347290039, "learning_rate": 1.0431111111111111e-05, "loss": 0.7768, "step": 10162 }, { "epoch": 81.304, "grad_norm": 57.143001556396484, "learning_rate": 1.0426666666666666e-05, "loss": 0.7605, "step": 10163 }, { "epoch": 81.312, "grad_norm": 98.25616455078125, "learning_rate": 1.0422222222222223e-05, "loss": 0.6655, "step": 10164 }, { "epoch": 81.32, "grad_norm": 19.954761505126953, 
"learning_rate": 1.0417777777777778e-05, "loss": 0.919, "step": 10165 }, { "epoch": 81.328, "grad_norm": 28.388141632080078, "learning_rate": 1.0413333333333332e-05, "loss": 1.1441, "step": 10166 }, { "epoch": 81.336, "grad_norm": 13.155719757080078, "learning_rate": 1.0408888888888889e-05, "loss": 0.6997, "step": 10167 }, { "epoch": 81.344, "grad_norm": 19.497177124023438, "learning_rate": 1.0404444444444444e-05, "loss": 0.6589, "step": 10168 }, { "epoch": 81.352, "grad_norm": 27.099977493286133, "learning_rate": 1.04e-05, "loss": 0.8533, "step": 10169 }, { "epoch": 81.36, "grad_norm": 19.784212112426758, "learning_rate": 1.0395555555555555e-05, "loss": 0.7815, "step": 10170 }, { "epoch": 81.368, "grad_norm": 157.94869995117188, "learning_rate": 1.0391111111111112e-05, "loss": 0.8534, "step": 10171 }, { "epoch": 81.376, "grad_norm": 17.8778133392334, "learning_rate": 1.0386666666666667e-05, "loss": 0.6271, "step": 10172 }, { "epoch": 81.384, "grad_norm": 33.33106231689453, "learning_rate": 1.0382222222222223e-05, "loss": 0.6157, "step": 10173 }, { "epoch": 81.392, "grad_norm": 21.507322311401367, "learning_rate": 1.0377777777777778e-05, "loss": 1.2025, "step": 10174 }, { "epoch": 81.4, "grad_norm": 25.64964485168457, "learning_rate": 1.0373333333333333e-05, "loss": 0.6049, "step": 10175 }, { "epoch": 81.408, "grad_norm": 17.959810256958008, "learning_rate": 1.036888888888889e-05, "loss": 0.9001, "step": 10176 }, { "epoch": 81.416, "grad_norm": 18.102609634399414, "learning_rate": 1.0364444444444444e-05, "loss": 0.6559, "step": 10177 }, { "epoch": 81.424, "grad_norm": 14.696127891540527, "learning_rate": 1.036e-05, "loss": 0.6748, "step": 10178 }, { "epoch": 81.432, "grad_norm": 17.55080223083496, "learning_rate": 1.0355555555555556e-05, "loss": 0.8004, "step": 10179 }, { "epoch": 81.44, "grad_norm": 25.221708297729492, "learning_rate": 1.0351111111111112e-05, "loss": 1.0935, "step": 10180 }, { "epoch": 81.448, "grad_norm": 21.065866470336914, "learning_rate": 
1.0346666666666667e-05, "loss": 0.7513, "step": 10181 }, { "epoch": 81.456, "grad_norm": 80.08181762695312, "learning_rate": 1.0342222222222222e-05, "loss": 0.8205, "step": 10182 }, { "epoch": 81.464, "grad_norm": 45.83925247192383, "learning_rate": 1.0337777777777778e-05, "loss": 0.7062, "step": 10183 }, { "epoch": 81.472, "grad_norm": 25.483299255371094, "learning_rate": 1.0333333333333333e-05, "loss": 0.9209, "step": 10184 }, { "epoch": 81.48, "grad_norm": 27.50742530822754, "learning_rate": 1.032888888888889e-05, "loss": 1.1221, "step": 10185 }, { "epoch": 81.488, "grad_norm": 15.601913452148438, "learning_rate": 1.0324444444444445e-05, "loss": 0.9291, "step": 10186 }, { "epoch": 81.496, "grad_norm": 49.907501220703125, "learning_rate": 1.0320000000000001e-05, "loss": 0.8945, "step": 10187 }, { "epoch": 81.504, "grad_norm": 20.555593490600586, "learning_rate": 1.0315555555555556e-05, "loss": 0.949, "step": 10188 }, { "epoch": 81.512, "grad_norm": 48.2534065246582, "learning_rate": 1.031111111111111e-05, "loss": 0.7887, "step": 10189 }, { "epoch": 81.52, "grad_norm": 24.154937744140625, "learning_rate": 1.0306666666666667e-05, "loss": 0.8453, "step": 10190 }, { "epoch": 81.528, "grad_norm": 26.887229919433594, "learning_rate": 1.0302222222222222e-05, "loss": 0.9011, "step": 10191 }, { "epoch": 81.536, "grad_norm": 16.09562110900879, "learning_rate": 1.0297777777777779e-05, "loss": 0.949, "step": 10192 }, { "epoch": 81.544, "grad_norm": 41.08823013305664, "learning_rate": 1.0293333333333334e-05, "loss": 1.1049, "step": 10193 }, { "epoch": 81.552, "grad_norm": 12.247407913208008, "learning_rate": 1.028888888888889e-05, "loss": 0.7289, "step": 10194 }, { "epoch": 81.56, "grad_norm": 33.720726013183594, "learning_rate": 1.0284444444444445e-05, "loss": 0.9447, "step": 10195 }, { "epoch": 81.568, "grad_norm": 15.936554908752441, "learning_rate": 1.0280000000000002e-05, "loss": 0.7201, "step": 10196 }, { "epoch": 81.576, "grad_norm": 19.715042114257812, 
"learning_rate": 1.0275555555555556e-05, "loss": 0.9152, "step": 10197 }, { "epoch": 81.584, "grad_norm": 73.15982055664062, "learning_rate": 1.0271111111111111e-05, "loss": 1.0454, "step": 10198 }, { "epoch": 81.592, "grad_norm": 25.7061767578125, "learning_rate": 1.0266666666666668e-05, "loss": 0.5073, "step": 10199 }, { "epoch": 81.6, "grad_norm": 104.1511001586914, "learning_rate": 1.0262222222222223e-05, "loss": 1.1768, "step": 10200 }, { "epoch": 81.608, "grad_norm": 24.306669235229492, "learning_rate": 1.0257777777777779e-05, "loss": 2.0513, "step": 10201 }, { "epoch": 81.616, "grad_norm": 47.378265380859375, "learning_rate": 1.0253333333333334e-05, "loss": 0.6821, "step": 10202 }, { "epoch": 81.624, "grad_norm": 28.506366729736328, "learning_rate": 1.024888888888889e-05, "loss": 0.666, "step": 10203 }, { "epoch": 81.632, "grad_norm": 23.62654685974121, "learning_rate": 1.0244444444444445e-05, "loss": 0.7477, "step": 10204 }, { "epoch": 81.64, "grad_norm": 14.523857116699219, "learning_rate": 1.024e-05, "loss": 0.6826, "step": 10205 }, { "epoch": 81.648, "grad_norm": 35.49387741088867, "learning_rate": 1.0235555555555557e-05, "loss": 0.7425, "step": 10206 }, { "epoch": 81.656, "grad_norm": 20.72652244567871, "learning_rate": 1.0231111111111112e-05, "loss": 0.851, "step": 10207 }, { "epoch": 81.664, "grad_norm": 25.83449363708496, "learning_rate": 1.0226666666666668e-05, "loss": 0.6946, "step": 10208 }, { "epoch": 81.672, "grad_norm": 18.121545791625977, "learning_rate": 1.0222222222222223e-05, "loss": 0.5942, "step": 10209 }, { "epoch": 81.68, "grad_norm": 24.02880859375, "learning_rate": 1.021777777777778e-05, "loss": 1.1702, "step": 10210 }, { "epoch": 81.688, "grad_norm": 71.34529876708984, "learning_rate": 1.0213333333333334e-05, "loss": 2.8351, "step": 10211 }, { "epoch": 81.696, "grad_norm": 25.442337036132812, "learning_rate": 1.020888888888889e-05, "loss": 0.7148, "step": 10212 }, { "epoch": 81.704, "grad_norm": 23.618690490722656, "learning_rate": 
1.0204444444444444e-05, "loss": 0.8857, "step": 10213 }, { "epoch": 81.712, "grad_norm": 21.218032836914062, "learning_rate": 1.02e-05, "loss": 0.9014, "step": 10214 }, { "epoch": 81.72, "grad_norm": 28.091304779052734, "learning_rate": 1.0195555555555555e-05, "loss": 0.8785, "step": 10215 }, { "epoch": 81.728, "grad_norm": 16.178613662719727, "learning_rate": 1.019111111111111e-05, "loss": 0.8024, "step": 10216 }, { "epoch": 81.736, "grad_norm": 22.91112518310547, "learning_rate": 1.0186666666666667e-05, "loss": 0.7961, "step": 10217 }, { "epoch": 81.744, "grad_norm": 20.824909210205078, "learning_rate": 1.0182222222222222e-05, "loss": 0.5887, "step": 10218 }, { "epoch": 81.752, "grad_norm": 55.26679229736328, "learning_rate": 1.0177777777777778e-05, "loss": 0.6886, "step": 10219 }, { "epoch": 81.76, "grad_norm": 25.00462532043457, "learning_rate": 1.0173333333333333e-05, "loss": 1.3315, "step": 10220 }, { "epoch": 81.768, "grad_norm": 27.060060501098633, "learning_rate": 1.016888888888889e-05, "loss": 0.954, "step": 10221 }, { "epoch": 81.776, "grad_norm": 28.97288703918457, "learning_rate": 1.0164444444444444e-05, "loss": 0.636, "step": 10222 }, { "epoch": 81.784, "grad_norm": 20.672332763671875, "learning_rate": 1.016e-05, "loss": 0.9474, "step": 10223 }, { "epoch": 81.792, "grad_norm": 35.651283264160156, "learning_rate": 1.0155555555555556e-05, "loss": 0.8305, "step": 10224 }, { "epoch": 81.8, "grad_norm": 32.09408950805664, "learning_rate": 1.015111111111111e-05, "loss": 0.7697, "step": 10225 }, { "epoch": 81.808, "grad_norm": 28.74239158630371, "learning_rate": 1.0146666666666667e-05, "loss": 0.8891, "step": 10226 }, { "epoch": 81.816, "grad_norm": 26.97193145751953, "learning_rate": 1.0142222222222222e-05, "loss": 0.9107, "step": 10227 }, { "epoch": 81.824, "grad_norm": 16.966089248657227, "learning_rate": 1.0137777777777779e-05, "loss": 1.1234, "step": 10228 }, { "epoch": 81.832, "grad_norm": 17.41078758239746, "learning_rate": 1.0133333333333333e-05, 
"loss": 0.8931, "step": 10229 }, { "epoch": 81.84, "grad_norm": 23.5035400390625, "learning_rate": 1.012888888888889e-05, "loss": 0.7485, "step": 10230 }, { "epoch": 81.848, "grad_norm": 26.90778923034668, "learning_rate": 1.0124444444444445e-05, "loss": 0.977, "step": 10231 }, { "epoch": 81.856, "grad_norm": 14.242965698242188, "learning_rate": 1.012e-05, "loss": 0.9964, "step": 10232 }, { "epoch": 81.864, "grad_norm": 16.301137924194336, "learning_rate": 1.0115555555555556e-05, "loss": 0.8396, "step": 10233 }, { "epoch": 81.872, "grad_norm": 21.4726505279541, "learning_rate": 1.0111111111111111e-05, "loss": 0.7996, "step": 10234 }, { "epoch": 81.88, "grad_norm": 25.883716583251953, "learning_rate": 1.0106666666666668e-05, "loss": 0.977, "step": 10235 }, { "epoch": 81.888, "grad_norm": 22.116792678833008, "learning_rate": 1.0102222222222222e-05, "loss": 0.8351, "step": 10236 }, { "epoch": 81.896, "grad_norm": 24.26121711730957, "learning_rate": 1.0097777777777779e-05, "loss": 1.6821, "step": 10237 }, { "epoch": 81.904, "grad_norm": 23.826950073242188, "learning_rate": 1.0093333333333334e-05, "loss": 0.7663, "step": 10238 }, { "epoch": 81.912, "grad_norm": 16.910358428955078, "learning_rate": 1.0088888888888889e-05, "loss": 0.7305, "step": 10239 }, { "epoch": 81.92, "grad_norm": 30.473562240600586, "learning_rate": 1.0084444444444445e-05, "loss": 1.6143, "step": 10240 }, { "epoch": 81.928, "grad_norm": 26.253618240356445, "learning_rate": 1.008e-05, "loss": 0.8984, "step": 10241 }, { "epoch": 81.936, "grad_norm": 22.14686393737793, "learning_rate": 1.0075555555555557e-05, "loss": 1.1006, "step": 10242 }, { "epoch": 81.944, "grad_norm": 42.586090087890625, "learning_rate": 1.0071111111111111e-05, "loss": 1.5979, "step": 10243 }, { "epoch": 81.952, "grad_norm": 16.08417510986328, "learning_rate": 1.0066666666666668e-05, "loss": 0.879, "step": 10244 }, { "epoch": 81.96, "grad_norm": 37.991519927978516, "learning_rate": 1.0062222222222223e-05, "loss": 1.0739, "step": 
10245 }, { "epoch": 81.968, "grad_norm": 18.606914520263672, "learning_rate": 1.0057777777777778e-05, "loss": 0.8247, "step": 10246 }, { "epoch": 81.976, "grad_norm": 28.378843307495117, "learning_rate": 1.0053333333333334e-05, "loss": 0.9483, "step": 10247 }, { "epoch": 81.984, "grad_norm": 47.601531982421875, "learning_rate": 1.0048888888888889e-05, "loss": 0.7069, "step": 10248 }, { "epoch": 81.992, "grad_norm": 48.549339294433594, "learning_rate": 1.0044444444444446e-05, "loss": 1.7461, "step": 10249 }, { "epoch": 82.0, "grad_norm": 107.4670181274414, "learning_rate": 1.004e-05, "loss": 0.6977, "step": 10250 }, { "epoch": 82.0, "eval_loss": 0.9332587122917175, "eval_map": 0.4654, "eval_map_50": 0.8147, "eval_map_75": 0.4531, "eval_map_Coverall": 0.696, "eval_map_Face_Shield": 0.5467, "eval_map_Gloves": 0.3497, "eval_map_Goggles": 0.3031, "eval_map_Mask": 0.4315, "eval_map_large": 0.6777, "eval_map_medium": 0.3233, "eval_map_small": 0.4231, "eval_mar_1": 0.3549, "eval_mar_10": 0.5979, "eval_mar_100": 0.6072, "eval_mar_100_Coverall": 0.78, "eval_mar_100_Face_Shield": 0.7235, "eval_mar_100_Gloves": 0.5082, "eval_mar_100_Goggles": 0.5031, "eval_mar_100_Mask": 0.5212, "eval_mar_large": 0.7845, "eval_mar_medium": 0.4874, "eval_mar_small": 0.4638, "eval_runtime": 0.9213, "eval_samples_per_second": 31.478, "eval_steps_per_second": 2.171, "step": 10250 }, { "epoch": 82.008, "grad_norm": 19.08026123046875, "learning_rate": 1.0035555555555557e-05, "loss": 1.0761, "step": 10251 }, { "epoch": 82.016, "grad_norm": 16.555072784423828, "learning_rate": 1.0031111111111112e-05, "loss": 0.6788, "step": 10252 }, { "epoch": 82.024, "grad_norm": 31.972084045410156, "learning_rate": 1.0026666666666668e-05, "loss": 1.0008, "step": 10253 }, { "epoch": 82.032, "grad_norm": 32.482635498046875, "learning_rate": 1.0022222222222223e-05, "loss": 0.6744, "step": 10254 }, { "epoch": 82.04, "grad_norm": 23.821123123168945, "learning_rate": 1.0017777777777778e-05, "loss": 0.9213, "step": 10255 
}, { "epoch": 82.048, "grad_norm": 21.948711395263672, "learning_rate": 1.0013333333333335e-05, "loss": 0.6375, "step": 10256 }, { "epoch": 82.056, "grad_norm": 25.178823471069336, "learning_rate": 1.000888888888889e-05, "loss": 0.8246, "step": 10257 }, { "epoch": 82.064, "grad_norm": 14.968878746032715, "learning_rate": 1.0004444444444446e-05, "loss": 0.7114, "step": 10258 }, { "epoch": 82.072, "grad_norm": 19.438846588134766, "learning_rate": 1e-05, "loss": 0.9545, "step": 10259 }, { "epoch": 82.08, "grad_norm": 28.127384185791016, "learning_rate": 9.995555555555557e-06, "loss": 0.5074, "step": 10260 }, { "epoch": 82.088, "grad_norm": 19.4256534576416, "learning_rate": 9.991111111111112e-06, "loss": 0.8396, "step": 10261 }, { "epoch": 82.096, "grad_norm": 37.95643615722656, "learning_rate": 9.986666666666667e-06, "loss": 0.7555, "step": 10262 }, { "epoch": 82.104, "grad_norm": 16.44987678527832, "learning_rate": 9.982222222222224e-06, "loss": 0.7909, "step": 10263 }, { "epoch": 82.112, "grad_norm": 17.497556686401367, "learning_rate": 9.977777777777778e-06, "loss": 0.6977, "step": 10264 }, { "epoch": 82.12, "grad_norm": 18.93461036682129, "learning_rate": 9.973333333333333e-06, "loss": 0.7276, "step": 10265 }, { "epoch": 82.128, "grad_norm": 20.30219268798828, "learning_rate": 9.968888888888888e-06, "loss": 1.0578, "step": 10266 }, { "epoch": 82.136, "grad_norm": 70.77417755126953, "learning_rate": 9.964444444444445e-06, "loss": 0.7882, "step": 10267 }, { "epoch": 82.144, "grad_norm": 28.9427547454834, "learning_rate": 9.96e-06, "loss": 0.8177, "step": 10268 }, { "epoch": 82.152, "grad_norm": 19.298500061035156, "learning_rate": 9.955555555555556e-06, "loss": 0.9333, "step": 10269 }, { "epoch": 82.16, "grad_norm": 28.782594680786133, "learning_rate": 9.951111111111111e-06, "loss": 0.5688, "step": 10270 }, { "epoch": 82.168, "grad_norm": 24.559528350830078, "learning_rate": 9.946666666666667e-06, "loss": 0.7624, "step": 10271 }, { "epoch": 82.176, "grad_norm": 
23.043731689453125, "learning_rate": 9.942222222222222e-06, "loss": 1.0603, "step": 10272 }, { "epoch": 82.184, "grad_norm": 16.161827087402344, "learning_rate": 9.937777777777777e-06, "loss": 0.8857, "step": 10273 }, { "epoch": 82.192, "grad_norm": 15.273265838623047, "learning_rate": 9.933333333333334e-06, "loss": 0.6525, "step": 10274 }, { "epoch": 82.2, "grad_norm": 17.89559555053711, "learning_rate": 9.928888888888889e-06, "loss": 0.8065, "step": 10275 }, { "epoch": 82.208, "grad_norm": 40.60629653930664, "learning_rate": 9.924444444444445e-06, "loss": 0.9661, "step": 10276 }, { "epoch": 82.216, "grad_norm": 88.90424346923828, "learning_rate": 9.92e-06, "loss": 1.1068, "step": 10277 }, { "epoch": 82.224, "grad_norm": 16.727449417114258, "learning_rate": 9.915555555555556e-06, "loss": 0.6946, "step": 10278 }, { "epoch": 82.232, "grad_norm": 17.345134735107422, "learning_rate": 9.911111111111111e-06, "loss": 0.7281, "step": 10279 }, { "epoch": 82.24, "grad_norm": 16.83430290222168, "learning_rate": 9.906666666666666e-06, "loss": 1.1242, "step": 10280 }, { "epoch": 82.248, "grad_norm": 28.461668014526367, "learning_rate": 9.902222222222223e-06, "loss": 0.8661, "step": 10281 }, { "epoch": 82.256, "grad_norm": 25.03466033935547, "learning_rate": 9.897777777777778e-06, "loss": 0.9247, "step": 10282 }, { "epoch": 82.264, "grad_norm": 13.728727340698242, "learning_rate": 9.893333333333334e-06, "loss": 0.6286, "step": 10283 }, { "epoch": 82.272, "grad_norm": 15.9323148727417, "learning_rate": 9.888888888888889e-06, "loss": 1.0248, "step": 10284 }, { "epoch": 82.28, "grad_norm": 16.095773696899414, "learning_rate": 9.884444444444445e-06, "loss": 1.1657, "step": 10285 }, { "epoch": 82.288, "grad_norm": 42.148681640625, "learning_rate": 9.88e-06, "loss": 0.7757, "step": 10286 }, { "epoch": 82.296, "grad_norm": 28.000932693481445, "learning_rate": 9.875555555555555e-06, "loss": 1.0391, "step": 10287 }, { "epoch": 82.304, "grad_norm": 35.15178298950195, "learning_rate": 
9.871111111111112e-06, "loss": 0.5682, "step": 10288 }, { "epoch": 82.312, "grad_norm": 13.075769424438477, "learning_rate": 9.866666666666667e-06, "loss": 0.7516, "step": 10289 }, { "epoch": 82.32, "grad_norm": 34.18351364135742, "learning_rate": 9.862222222222223e-06, "loss": 0.9945, "step": 10290 }, { "epoch": 82.328, "grad_norm": 19.57748031616211, "learning_rate": 9.857777777777778e-06, "loss": 0.8497, "step": 10291 }, { "epoch": 82.336, "grad_norm": 25.239168167114258, "learning_rate": 9.853333333333334e-06, "loss": 0.6666, "step": 10292 }, { "epoch": 82.344, "grad_norm": 38.060882568359375, "learning_rate": 9.84888888888889e-06, "loss": 2.6835, "step": 10293 }, { "epoch": 82.352, "grad_norm": 58.304931640625, "learning_rate": 9.844444444444446e-06, "loss": 0.747, "step": 10294 }, { "epoch": 82.36, "grad_norm": 17.460346221923828, "learning_rate": 9.84e-06, "loss": 0.7897, "step": 10295 }, { "epoch": 82.368, "grad_norm": 20.64019775390625, "learning_rate": 9.835555555555556e-06, "loss": 0.8141, "step": 10296 }, { "epoch": 82.376, "grad_norm": 56.40934753417969, "learning_rate": 9.831111111111112e-06, "loss": 0.7344, "step": 10297 }, { "epoch": 82.384, "grad_norm": 18.877567291259766, "learning_rate": 9.826666666666667e-06, "loss": 0.6671, "step": 10298 }, { "epoch": 82.392, "grad_norm": 17.499929428100586, "learning_rate": 9.822222222222223e-06, "loss": 0.8473, "step": 10299 }, { "epoch": 82.4, "grad_norm": 35.556800842285156, "learning_rate": 9.817777777777778e-06, "loss": 0.8831, "step": 10300 }, { "epoch": 82.408, "grad_norm": 19.65022850036621, "learning_rate": 9.813333333333335e-06, "loss": 0.9193, "step": 10301 }, { "epoch": 82.416, "grad_norm": 16.603174209594727, "learning_rate": 9.80888888888889e-06, "loss": 0.9399, "step": 10302 }, { "epoch": 82.424, "grad_norm": 17.893970489501953, "learning_rate": 9.804444444444444e-06, "loss": 1.0189, "step": 10303 }, { "epoch": 82.432, "grad_norm": 16.056148529052734, "learning_rate": 9.800000000000001e-06, 
"loss": 0.7445, "step": 10304 }, { "epoch": 82.44, "grad_norm": 33.28110885620117, "learning_rate": 9.795555555555556e-06, "loss": 1.1508, "step": 10305 }, { "epoch": 82.448, "grad_norm": 87.17369079589844, "learning_rate": 9.791111111111112e-06, "loss": 0.9014, "step": 10306 }, { "epoch": 82.456, "grad_norm": 42.14162063598633, "learning_rate": 9.786666666666667e-06, "loss": 2.1346, "step": 10307 }, { "epoch": 82.464, "grad_norm": 47.754119873046875, "learning_rate": 9.782222222222224e-06, "loss": 0.7912, "step": 10308 }, { "epoch": 82.472, "grad_norm": 35.036376953125, "learning_rate": 9.777777777777779e-06, "loss": 0.478, "step": 10309 }, { "epoch": 82.48, "grad_norm": 33.943416595458984, "learning_rate": 9.773333333333333e-06, "loss": 1.3906, "step": 10310 }, { "epoch": 82.488, "grad_norm": 28.211164474487305, "learning_rate": 9.76888888888889e-06, "loss": 0.8804, "step": 10311 }, { "epoch": 82.496, "grad_norm": 15.87792682647705, "learning_rate": 9.764444444444445e-06, "loss": 0.9435, "step": 10312 }, { "epoch": 82.504, "grad_norm": 31.903749465942383, "learning_rate": 9.760000000000001e-06, "loss": 0.7122, "step": 10313 }, { "epoch": 82.512, "grad_norm": 29.728713989257812, "learning_rate": 9.755555555555556e-06, "loss": 0.8115, "step": 10314 }, { "epoch": 82.52, "grad_norm": 21.531885147094727, "learning_rate": 9.751111111111113e-06, "loss": 0.644, "step": 10315 }, { "epoch": 82.528, "grad_norm": 20.703426361083984, "learning_rate": 9.746666666666666e-06, "loss": 0.5788, "step": 10316 }, { "epoch": 82.536, "grad_norm": 25.94281005859375, "learning_rate": 9.742222222222222e-06, "loss": 0.9136, "step": 10317 }, { "epoch": 82.544, "grad_norm": 14.791070938110352, "learning_rate": 9.737777777777777e-06, "loss": 0.7625, "step": 10318 }, { "epoch": 82.552, "grad_norm": 40.35537338256836, "learning_rate": 9.733333333333334e-06, "loss": 0.8301, "step": 10319 }, { "epoch": 82.56, "grad_norm": 32.606117248535156, "learning_rate": 9.728888888888889e-06, "loss": 0.6804, 
"step": 10320 }, { "epoch": 82.568, "grad_norm": 13.553171157836914, "learning_rate": 9.724444444444444e-06, "loss": 0.8708, "step": 10321 }, { "epoch": 82.576, "grad_norm": 20.880733489990234, "learning_rate": 9.72e-06, "loss": 0.9118, "step": 10322 }, { "epoch": 82.584, "grad_norm": 11.941482543945312, "learning_rate": 9.715555555555555e-06, "loss": 1.002, "step": 10323 }, { "epoch": 82.592, "grad_norm": 18.188692092895508, "learning_rate": 9.711111111111111e-06, "loss": 0.7759, "step": 10324 }, { "epoch": 82.6, "grad_norm": 19.709854125976562, "learning_rate": 9.706666666666666e-06, "loss": 0.8958, "step": 10325 }, { "epoch": 82.608, "grad_norm": 18.840850830078125, "learning_rate": 9.702222222222223e-06, "loss": 0.7868, "step": 10326 }, { "epoch": 82.616, "grad_norm": 26.32664680480957, "learning_rate": 9.697777777777778e-06, "loss": 1.0746, "step": 10327 }, { "epoch": 82.624, "grad_norm": 30.918636322021484, "learning_rate": 9.693333333333334e-06, "loss": 1.3165, "step": 10328 }, { "epoch": 82.632, "grad_norm": 28.37993049621582, "learning_rate": 9.688888888888889e-06, "loss": 0.5092, "step": 10329 }, { "epoch": 82.64, "grad_norm": 28.547428131103516, "learning_rate": 9.684444444444444e-06, "loss": 0.8975, "step": 10330 }, { "epoch": 82.648, "grad_norm": 28.756858825683594, "learning_rate": 9.68e-06, "loss": 0.8428, "step": 10331 }, { "epoch": 82.656, "grad_norm": 23.757802963256836, "learning_rate": 9.675555555555555e-06, "loss": 1.1156, "step": 10332 }, { "epoch": 82.664, "grad_norm": 31.14637565612793, "learning_rate": 9.671111111111112e-06, "loss": 0.6448, "step": 10333 }, { "epoch": 82.672, "grad_norm": 22.144760131835938, "learning_rate": 9.666666666666667e-06, "loss": 0.8764, "step": 10334 }, { "epoch": 82.68, "grad_norm": 89.77455139160156, "learning_rate": 9.662222222222223e-06, "loss": 0.617, "step": 10335 }, { "epoch": 82.688, "grad_norm": 20.690786361694336, "learning_rate": 9.657777777777778e-06, "loss": 1.7301, "step": 10336 }, { "epoch": 82.696, 
"grad_norm": 18.53122329711914, "learning_rate": 9.653333333333333e-06, "loss": 0.7536, "step": 10337 }, { "epoch": 82.704, "grad_norm": 13.631302833557129, "learning_rate": 9.64888888888889e-06, "loss": 0.5489, "step": 10338 }, { "epoch": 82.712, "grad_norm": 23.526853561401367, "learning_rate": 9.644444444444444e-06, "loss": 1.0203, "step": 10339 }, { "epoch": 82.72, "grad_norm": 45.62123107910156, "learning_rate": 9.640000000000001e-06, "loss": 0.5868, "step": 10340 }, { "epoch": 82.728, "grad_norm": 29.30400276184082, "learning_rate": 9.635555555555556e-06, "loss": 0.7572, "step": 10341 }, { "epoch": 82.736, "grad_norm": 15.315338134765625, "learning_rate": 9.631111111111112e-06, "loss": 0.8037, "step": 10342 }, { "epoch": 82.744, "grad_norm": 51.3578987121582, "learning_rate": 9.626666666666667e-06, "loss": 1.6028, "step": 10343 }, { "epoch": 82.752, "grad_norm": 21.661022186279297, "learning_rate": 9.622222222222222e-06, "loss": 0.7058, "step": 10344 }, { "epoch": 82.76, "grad_norm": 60.094032287597656, "learning_rate": 9.617777777777778e-06, "loss": 0.9372, "step": 10345 }, { "epoch": 82.768, "grad_norm": 22.653671264648438, "learning_rate": 9.613333333333333e-06, "loss": 0.9098, "step": 10346 }, { "epoch": 82.776, "grad_norm": 35.07878875732422, "learning_rate": 9.60888888888889e-06, "loss": 0.9753, "step": 10347 }, { "epoch": 82.784, "grad_norm": 28.97146987915039, "learning_rate": 9.604444444444445e-06, "loss": 0.853, "step": 10348 }, { "epoch": 82.792, "grad_norm": 56.65012741088867, "learning_rate": 9.600000000000001e-06, "loss": 1.0266, "step": 10349 }, { "epoch": 82.8, "grad_norm": 16.715896606445312, "learning_rate": 9.595555555555556e-06, "loss": 0.5056, "step": 10350 }, { "epoch": 82.808, "grad_norm": 21.841312408447266, "learning_rate": 9.591111111111113e-06, "loss": 0.9889, "step": 10351 }, { "epoch": 82.816, "grad_norm": 15.616612434387207, "learning_rate": 9.586666666666667e-06, "loss": 0.7416, "step": 10352 }, { "epoch": 82.824, "grad_norm": 
47.1903076171875, "learning_rate": 9.582222222222222e-06, "loss": 0.7073, "step": 10353 }, { "epoch": 82.832, "grad_norm": 41.60221481323242, "learning_rate": 9.577777777777779e-06, "loss": 0.7022, "step": 10354 }, { "epoch": 82.84, "grad_norm": 21.416868209838867, "learning_rate": 9.573333333333334e-06, "loss": 0.8745, "step": 10355 }, { "epoch": 82.848, "grad_norm": 31.626798629760742, "learning_rate": 9.56888888888889e-06, "loss": 0.7582, "step": 10356 }, { "epoch": 82.856, "grad_norm": 17.35628318786621, "learning_rate": 9.564444444444445e-06, "loss": 0.823, "step": 10357 }, { "epoch": 82.864, "grad_norm": 19.6823673248291, "learning_rate": 9.560000000000002e-06, "loss": 1.6567, "step": 10358 }, { "epoch": 82.872, "grad_norm": 32.303443908691406, "learning_rate": 9.555555555555556e-06, "loss": 1.1845, "step": 10359 }, { "epoch": 82.88, "grad_norm": 27.593351364135742, "learning_rate": 9.551111111111111e-06, "loss": 0.9419, "step": 10360 }, { "epoch": 82.888, "grad_norm": 38.140079498291016, "learning_rate": 9.546666666666668e-06, "loss": 0.7946, "step": 10361 }, { "epoch": 82.896, "grad_norm": 25.64608383178711, "learning_rate": 9.542222222222223e-06, "loss": 0.7897, "step": 10362 }, { "epoch": 82.904, "grad_norm": 22.76123809814453, "learning_rate": 9.53777777777778e-06, "loss": 0.7197, "step": 10363 }, { "epoch": 82.912, "grad_norm": 36.60903549194336, "learning_rate": 9.533333333333334e-06, "loss": 0.7716, "step": 10364 }, { "epoch": 82.92, "grad_norm": 20.3735408782959, "learning_rate": 9.52888888888889e-06, "loss": 0.6596, "step": 10365 }, { "epoch": 82.928, "grad_norm": 78.1407699584961, "learning_rate": 9.524444444444445e-06, "loss": 1.4955, "step": 10366 }, { "epoch": 82.936, "grad_norm": 17.23588752746582, "learning_rate": 9.52e-06, "loss": 0.7152, "step": 10367 }, { "epoch": 82.944, "grad_norm": 33.3177490234375, "learning_rate": 9.515555555555555e-06, "loss": 0.4933, "step": 10368 }, { "epoch": 82.952, "grad_norm": 74.02190399169922, "learning_rate": 
9.511111111111112e-06, "loss": 1.9079, "step": 10369 }, { "epoch": 82.96, "grad_norm": 28.251117706298828, "learning_rate": 9.506666666666667e-06, "loss": 0.6599, "step": 10370 }, { "epoch": 82.968, "grad_norm": 37.909507751464844, "learning_rate": 9.502222222222221e-06, "loss": 0.9276, "step": 10371 }, { "epoch": 82.976, "grad_norm": 17.612546920776367, "learning_rate": 9.497777777777778e-06, "loss": 0.8094, "step": 10372 }, { "epoch": 82.984, "grad_norm": 23.66755485534668, "learning_rate": 9.493333333333333e-06, "loss": 0.9673, "step": 10373 }, { "epoch": 82.992, "grad_norm": 24.012964248657227, "learning_rate": 9.48888888888889e-06, "loss": 0.9004, "step": 10374 }, { "epoch": 83.0, "grad_norm": 25.158771514892578, "learning_rate": 9.484444444444444e-06, "loss": 0.6956, "step": 10375 }, { "epoch": 83.0, "eval_loss": 0.9413266777992249, "eval_map": 0.4673, "eval_map_50": 0.8047, "eval_map_75": 0.4947, "eval_map_Coverall": 0.6857, "eval_map_Face_Shield": 0.5794, "eval_map_Gloves": 0.3549, "eval_map_Goggles": 0.2749, "eval_map_Mask": 0.4418, "eval_map_large": 0.6858, "eval_map_medium": 0.3313, "eval_map_small": 0.4361, "eval_mar_1": 0.3519, "eval_mar_10": 0.5951, "eval_mar_100": 0.6065, "eval_mar_100_Coverall": 0.7667, "eval_mar_100_Face_Shield": 0.7706, "eval_mar_100_Gloves": 0.5016, "eval_mar_100_Goggles": 0.4781, "eval_mar_100_Mask": 0.5154, "eval_mar_large": 0.8027, "eval_mar_medium": 0.4799, "eval_mar_small": 0.4819, "eval_runtime": 0.8994, "eval_samples_per_second": 32.243, "eval_steps_per_second": 2.224, "step": 10375 }, { "epoch": 83.008, "grad_norm": 26.631450653076172, "learning_rate": 9.48e-06, "loss": 1.578, "step": 10376 }, { "epoch": 83.016, "grad_norm": 19.833419799804688, "learning_rate": 9.475555555555556e-06, "loss": 0.7316, "step": 10377 }, { "epoch": 83.024, "grad_norm": 16.50539207458496, "learning_rate": 9.47111111111111e-06, "loss": 0.6643, "step": 10378 }, { "epoch": 83.032, "grad_norm": 31.39293098449707, "learning_rate": 
9.466666666666667e-06, "loss": 1.1054, "step": 10379 }, { "epoch": 83.04, "grad_norm": 36.669349670410156, "learning_rate": 9.462222222222222e-06, "loss": 1.0119, "step": 10380 }, { "epoch": 83.048, "grad_norm": 15.466306686401367, "learning_rate": 9.457777777777778e-06, "loss": 1.0611, "step": 10381 }, { "epoch": 83.056, "grad_norm": 14.982770919799805, "learning_rate": 9.453333333333333e-06, "loss": 0.5627, "step": 10382 }, { "epoch": 83.064, "grad_norm": 43.83234786987305, "learning_rate": 9.44888888888889e-06, "loss": 1.0778, "step": 10383 }, { "epoch": 83.072, "grad_norm": 114.68360900878906, "learning_rate": 9.444444444444445e-06, "loss": 0.8406, "step": 10384 }, { "epoch": 83.08, "grad_norm": 19.765304565429688, "learning_rate": 9.44e-06, "loss": 0.7994, "step": 10385 }, { "epoch": 83.088, "grad_norm": 50.096946716308594, "learning_rate": 9.435555555555556e-06, "loss": 0.8998, "step": 10386 }, { "epoch": 83.096, "grad_norm": 41.93359375, "learning_rate": 9.43111111111111e-06, "loss": 1.0255, "step": 10387 }, { "epoch": 83.104, "grad_norm": 91.25148010253906, "learning_rate": 9.426666666666667e-06, "loss": 0.7933, "step": 10388 }, { "epoch": 83.112, "grad_norm": 46.29458999633789, "learning_rate": 9.422222222222222e-06, "loss": 0.8861, "step": 10389 }, { "epoch": 83.12, "grad_norm": 18.12113380432129, "learning_rate": 9.417777777777779e-06, "loss": 0.7226, "step": 10390 }, { "epoch": 83.128, "grad_norm": 17.841577529907227, "learning_rate": 9.413333333333334e-06, "loss": 0.6508, "step": 10391 }, { "epoch": 83.136, "grad_norm": 19.112337112426758, "learning_rate": 9.40888888888889e-06, "loss": 0.8867, "step": 10392 }, { "epoch": 83.144, "grad_norm": 18.095800399780273, "learning_rate": 9.404444444444445e-06, "loss": 0.8322, "step": 10393 }, { "epoch": 83.152, "grad_norm": 20.567903518676758, "learning_rate": 9.4e-06, "loss": 0.8555, "step": 10394 }, { "epoch": 83.16, "grad_norm": 25.3778076171875, "learning_rate": 9.395555555555556e-06, "loss": 0.6286, "step": 
10395 }, { "epoch": 83.168, "grad_norm": 25.642879486083984, "learning_rate": 9.391111111111111e-06, "loss": 1.0814, "step": 10396 }, { "epoch": 83.176, "grad_norm": 22.452238082885742, "learning_rate": 9.386666666666668e-06, "loss": 0.8647, "step": 10397 }, { "epoch": 83.184, "grad_norm": 15.867181777954102, "learning_rate": 9.382222222222223e-06, "loss": 0.8448, "step": 10398 }, { "epoch": 83.192, "grad_norm": 14.625974655151367, "learning_rate": 9.377777777777779e-06, "loss": 0.8024, "step": 10399 }, { "epoch": 83.2, "grad_norm": 35.17099380493164, "learning_rate": 9.373333333333334e-06, "loss": 0.5551, "step": 10400 }, { "epoch": 83.208, "grad_norm": 23.909626007080078, "learning_rate": 9.368888888888889e-06, "loss": 0.6844, "step": 10401 }, { "epoch": 83.216, "grad_norm": 20.398765563964844, "learning_rate": 9.364444444444445e-06, "loss": 0.8357, "step": 10402 }, { "epoch": 83.224, "grad_norm": 19.209278106689453, "learning_rate": 9.36e-06, "loss": 0.6565, "step": 10403 }, { "epoch": 83.232, "grad_norm": 62.224674224853516, "learning_rate": 9.355555555555557e-06, "loss": 0.6024, "step": 10404 }, { "epoch": 83.24, "grad_norm": 16.928525924682617, "learning_rate": 9.351111111111112e-06, "loss": 1.1294, "step": 10405 }, { "epoch": 83.248, "grad_norm": 29.632173538208008, "learning_rate": 9.346666666666668e-06, "loss": 0.6473, "step": 10406 }, { "epoch": 83.256, "grad_norm": 23.400949478149414, "learning_rate": 9.342222222222223e-06, "loss": 0.6927, "step": 10407 }, { "epoch": 83.264, "grad_norm": 26.42879867553711, "learning_rate": 9.337777777777778e-06, "loss": 0.9394, "step": 10408 }, { "epoch": 83.272, "grad_norm": 12.269787788391113, "learning_rate": 9.333333333333334e-06, "loss": 0.8222, "step": 10409 }, { "epoch": 83.28, "grad_norm": 20.729005813598633, "learning_rate": 9.32888888888889e-06, "loss": 0.7456, "step": 10410 }, { "epoch": 83.288, "grad_norm": 67.2086181640625, "learning_rate": 9.324444444444446e-06, "loss": 0.8059, "step": 10411 }, { "epoch": 
83.296, "grad_norm": 51.48210906982422, "learning_rate": 9.32e-06, "loss": 0.6806, "step": 10412 }, { "epoch": 83.304, "grad_norm": 38.14451217651367, "learning_rate": 9.315555555555557e-06, "loss": 1.1748, "step": 10413 }, { "epoch": 83.312, "grad_norm": 24.01934242248535, "learning_rate": 9.311111111111112e-06, "loss": 0.7314, "step": 10414 }, { "epoch": 83.32, "grad_norm": 28.20632553100586, "learning_rate": 9.306666666666668e-06, "loss": 0.8293, "step": 10415 }, { "epoch": 83.328, "grad_norm": 26.17255401611328, "learning_rate": 9.302222222222223e-06, "loss": 0.6572, "step": 10416 }, { "epoch": 83.336, "grad_norm": 64.421630859375, "learning_rate": 9.297777777777778e-06, "loss": 0.5494, "step": 10417 }, { "epoch": 83.344, "grad_norm": 34.628658294677734, "learning_rate": 9.293333333333335e-06, "loss": 0.7877, "step": 10418 }, { "epoch": 83.352, "grad_norm": 16.249021530151367, "learning_rate": 9.288888888888888e-06, "loss": 1.0085, "step": 10419 }, { "epoch": 83.36, "grad_norm": 27.602272033691406, "learning_rate": 9.284444444444444e-06, "loss": 0.9002, "step": 10420 }, { "epoch": 83.368, "grad_norm": 18.63758087158203, "learning_rate": 9.28e-06, "loss": 0.8046, "step": 10421 }, { "epoch": 83.376, "grad_norm": 23.211973190307617, "learning_rate": 9.275555555555556e-06, "loss": 0.8614, "step": 10422 }, { "epoch": 83.384, "grad_norm": 18.17499542236328, "learning_rate": 9.27111111111111e-06, "loss": 0.7521, "step": 10423 }, { "epoch": 83.392, "grad_norm": 23.222707748413086, "learning_rate": 9.266666666666667e-06, "loss": 0.6955, "step": 10424 }, { "epoch": 83.4, "grad_norm": 60.11664581298828, "learning_rate": 9.262222222222222e-06, "loss": 0.8055, "step": 10425 }, { "epoch": 83.408, "grad_norm": 66.87635040283203, "learning_rate": 9.257777777777779e-06, "loss": 0.7577, "step": 10426 }, { "epoch": 83.416, "grad_norm": 25.580747604370117, "learning_rate": 9.253333333333333e-06, "loss": 0.7401, "step": 10427 }, { "epoch": 83.424, "grad_norm": 61.424232482910156, 
"learning_rate": 9.248888888888888e-06, "loss": 0.7069, "step": 10428 }, { "epoch": 83.432, "grad_norm": 27.452157974243164, "learning_rate": 9.244444444444445e-06, "loss": 1.0615, "step": 10429 }, { "epoch": 83.44, "grad_norm": 19.812204360961914, "learning_rate": 9.24e-06, "loss": 0.8029, "step": 10430 }, { "epoch": 83.448, "grad_norm": 21.342815399169922, "learning_rate": 9.235555555555556e-06, "loss": 0.8133, "step": 10431 }, { "epoch": 83.456, "grad_norm": 14.680985450744629, "learning_rate": 9.231111111111111e-06, "loss": 1.7538, "step": 10432 }, { "epoch": 83.464, "grad_norm": 29.000120162963867, "learning_rate": 9.226666666666668e-06, "loss": 0.846, "step": 10433 }, { "epoch": 83.472, "grad_norm": 140.2631072998047, "learning_rate": 9.222222222222222e-06, "loss": 0.6441, "step": 10434 }, { "epoch": 83.48, "grad_norm": 25.80504608154297, "learning_rate": 9.217777777777777e-06, "loss": 1.2978, "step": 10435 }, { "epoch": 83.488, "grad_norm": 51.171199798583984, "learning_rate": 9.213333333333334e-06, "loss": 0.886, "step": 10436 }, { "epoch": 83.496, "grad_norm": 17.510286331176758, "learning_rate": 9.208888888888889e-06, "loss": 0.7095, "step": 10437 }, { "epoch": 83.504, "grad_norm": 16.44451904296875, "learning_rate": 9.204444444444445e-06, "loss": 0.8782, "step": 10438 }, { "epoch": 83.512, "grad_norm": 17.409814834594727, "learning_rate": 9.2e-06, "loss": 0.9287, "step": 10439 }, { "epoch": 83.52, "grad_norm": 22.22148323059082, "learning_rate": 9.195555555555557e-06, "loss": 0.7806, "step": 10440 }, { "epoch": 83.528, "grad_norm": 13.485466957092285, "learning_rate": 9.191111111111111e-06, "loss": 0.8261, "step": 10441 }, { "epoch": 83.536, "grad_norm": 21.687076568603516, "learning_rate": 9.186666666666666e-06, "loss": 0.7601, "step": 10442 }, { "epoch": 83.544, "grad_norm": 33.39579391479492, "learning_rate": 9.182222222222223e-06, "loss": 0.7324, "step": 10443 }, { "epoch": 83.552, "grad_norm": 24.032691955566406, "learning_rate": 
9.177777777777778e-06, "loss": 0.7838, "step": 10444 }, { "epoch": 83.56, "grad_norm": 21.829748153686523, "learning_rate": 9.173333333333334e-06, "loss": 0.8302, "step": 10445 }, { "epoch": 83.568, "grad_norm": 30.989425659179688, "learning_rate": 9.168888888888889e-06, "loss": 0.6036, "step": 10446 }, { "epoch": 83.576, "grad_norm": 25.03839111328125, "learning_rate": 9.164444444444446e-06, "loss": 0.6608, "step": 10447 }, { "epoch": 83.584, "grad_norm": 15.565732955932617, "learning_rate": 9.16e-06, "loss": 0.8586, "step": 10448 }, { "epoch": 83.592, "grad_norm": 27.355133056640625, "learning_rate": 9.155555555555557e-06, "loss": 0.9731, "step": 10449 }, { "epoch": 83.6, "grad_norm": 23.585060119628906, "learning_rate": 9.151111111111112e-06, "loss": 1.0516, "step": 10450 }, { "epoch": 83.608, "grad_norm": 231.201904296875, "learning_rate": 9.146666666666667e-06, "loss": 2.2979, "step": 10451 }, { "epoch": 83.616, "grad_norm": 21.823680877685547, "learning_rate": 9.142222222222223e-06, "loss": 0.7733, "step": 10452 }, { "epoch": 83.624, "grad_norm": 17.79732322692871, "learning_rate": 9.137777777777778e-06, "loss": 0.753, "step": 10453 }, { "epoch": 83.632, "grad_norm": 33.45061492919922, "learning_rate": 9.133333333333335e-06, "loss": 0.6967, "step": 10454 }, { "epoch": 83.64, "grad_norm": 30.092418670654297, "learning_rate": 9.12888888888889e-06, "loss": 0.7145, "step": 10455 }, { "epoch": 83.648, "grad_norm": 22.05440902709961, "learning_rate": 9.124444444444446e-06, "loss": 0.9773, "step": 10456 }, { "epoch": 83.656, "grad_norm": 43.089786529541016, "learning_rate": 9.12e-06, "loss": 1.3676, "step": 10457 }, { "epoch": 83.664, "grad_norm": 28.595029830932617, "learning_rate": 9.115555555555556e-06, "loss": 0.8545, "step": 10458 }, { "epoch": 83.672, "grad_norm": 36.70147705078125, "learning_rate": 9.111111111111112e-06, "loss": 0.9637, "step": 10459 }, { "epoch": 83.68, "grad_norm": 18.298479080200195, "learning_rate": 9.106666666666667e-06, "loss": 1.5013, 
"step": 10460 }, { "epoch": 83.688, "grad_norm": 37.955047607421875, "learning_rate": 9.102222222222224e-06, "loss": 0.892, "step": 10461 }, { "epoch": 83.696, "grad_norm": 40.14850616455078, "learning_rate": 9.097777777777778e-06, "loss": 1.2459, "step": 10462 }, { "epoch": 83.704, "grad_norm": 17.684572219848633, "learning_rate": 9.093333333333335e-06, "loss": 0.9012, "step": 10463 }, { "epoch": 83.712, "grad_norm": 31.8175048828125, "learning_rate": 9.08888888888889e-06, "loss": 0.732, "step": 10464 }, { "epoch": 83.72, "grad_norm": 25.520971298217773, "learning_rate": 9.084444444444445e-06, "loss": 0.8889, "step": 10465 }, { "epoch": 83.728, "grad_norm": 20.080345153808594, "learning_rate": 9.080000000000001e-06, "loss": 1.1549, "step": 10466 }, { "epoch": 83.736, "grad_norm": 24.8560733795166, "learning_rate": 9.075555555555556e-06, "loss": 0.6491, "step": 10467 }, { "epoch": 83.744, "grad_norm": 53.60213851928711, "learning_rate": 9.071111111111113e-06, "loss": 0.6471, "step": 10468 }, { "epoch": 83.752, "grad_norm": 21.18506622314453, "learning_rate": 9.066666666666667e-06, "loss": 0.7212, "step": 10469 }, { "epoch": 83.76, "grad_norm": 43.44137191772461, "learning_rate": 9.062222222222224e-06, "loss": 0.688, "step": 10470 }, { "epoch": 83.768, "grad_norm": 24.988191604614258, "learning_rate": 9.057777777777777e-06, "loss": 1.7848, "step": 10471 }, { "epoch": 83.776, "grad_norm": 22.81592559814453, "learning_rate": 9.053333333333334e-06, "loss": 1.0049, "step": 10472 }, { "epoch": 83.784, "grad_norm": 28.020034790039062, "learning_rate": 9.048888888888888e-06, "loss": 1.2952, "step": 10473 }, { "epoch": 83.792, "grad_norm": 40.99970626831055, "learning_rate": 9.044444444444445e-06, "loss": 0.5336, "step": 10474 }, { "epoch": 83.8, "grad_norm": 20.42654800415039, "learning_rate": 9.04e-06, "loss": 1.0845, "step": 10475 }, { "epoch": 83.808, "grad_norm": 14.187870025634766, "learning_rate": 9.035555555555555e-06, "loss": 0.9318, "step": 10476 }, { "epoch": 
83.816, "grad_norm": 67.50132751464844, "learning_rate": 9.031111111111111e-06, "loss": 0.864, "step": 10477 }, { "epoch": 83.824, "grad_norm": 23.97684669494629, "learning_rate": 9.026666666666666e-06, "loss": 0.7798, "step": 10478 }, { "epoch": 83.832, "grad_norm": 21.985857009887695, "learning_rate": 9.022222222222223e-06, "loss": 0.9551, "step": 10479 }, { "epoch": 83.84, "grad_norm": 20.808948516845703, "learning_rate": 9.017777777777777e-06, "loss": 0.8883, "step": 10480 }, { "epoch": 83.848, "grad_norm": 27.481298446655273, "learning_rate": 9.013333333333334e-06, "loss": 0.6218, "step": 10481 }, { "epoch": 83.856, "grad_norm": 267.4560852050781, "learning_rate": 9.008888888888889e-06, "loss": 0.7761, "step": 10482 }, { "epoch": 83.864, "grad_norm": 23.107135772705078, "learning_rate": 9.004444444444444e-06, "loss": 0.5839, "step": 10483 }, { "epoch": 83.872, "grad_norm": 48.785675048828125, "learning_rate": 9e-06, "loss": 1.2514, "step": 10484 }, { "epoch": 83.88, "grad_norm": 27.73011016845703, "learning_rate": 8.995555555555555e-06, "loss": 0.8653, "step": 10485 }, { "epoch": 83.888, "grad_norm": 18.449548721313477, "learning_rate": 8.991111111111112e-06, "loss": 0.5555, "step": 10486 }, { "epoch": 83.896, "grad_norm": 24.624225616455078, "learning_rate": 8.986666666666666e-06, "loss": 0.9857, "step": 10487 }, { "epoch": 83.904, "grad_norm": 19.420490264892578, "learning_rate": 8.982222222222223e-06, "loss": 0.7413, "step": 10488 }, { "epoch": 83.912, "grad_norm": 26.39250373840332, "learning_rate": 8.977777777777778e-06, "loss": 0.7037, "step": 10489 }, { "epoch": 83.92, "grad_norm": 20.04631805419922, "learning_rate": 8.973333333333334e-06, "loss": 0.6649, "step": 10490 }, { "epoch": 83.928, "grad_norm": 41.12007522583008, "learning_rate": 8.96888888888889e-06, "loss": 0.859, "step": 10491 }, { "epoch": 83.936, "grad_norm": 23.996856689453125, "learning_rate": 8.964444444444444e-06, "loss": 0.953, "step": 10492 }, { "epoch": 83.944, "grad_norm": 
22.330829620361328, "learning_rate": 8.96e-06, "loss": 0.7418, "step": 10493 }, { "epoch": 83.952, "grad_norm": 27.718612670898438, "learning_rate": 8.955555555555555e-06, "loss": 0.9632, "step": 10494 }, { "epoch": 83.96, "grad_norm": 16.44587516784668, "learning_rate": 8.951111111111112e-06, "loss": 1.0613, "step": 10495 }, { "epoch": 83.968, "grad_norm": 20.30019760131836, "learning_rate": 8.946666666666667e-06, "loss": 0.6106, "step": 10496 }, { "epoch": 83.976, "grad_norm": 18.924304962158203, "learning_rate": 8.942222222222223e-06, "loss": 0.714, "step": 10497 }, { "epoch": 83.984, "grad_norm": 32.90071105957031, "learning_rate": 8.937777777777778e-06, "loss": 0.8324, "step": 10498 }, { "epoch": 83.992, "grad_norm": 13.473955154418945, "learning_rate": 8.933333333333333e-06, "loss": 0.6992, "step": 10499 }, { "epoch": 84.0, "grad_norm": 29.587934494018555, "learning_rate": 8.92888888888889e-06, "loss": 0.8782, "step": 10500 }, { "epoch": 84.0, "eval_loss": 0.9224801659584045, "eval_map": 0.4657, "eval_map_50": 0.8005, "eval_map_75": 0.4944, "eval_map_Coverall": 0.7016, "eval_map_Face_Shield": 0.5355, "eval_map_Gloves": 0.3701, "eval_map_Goggles": 0.2861, "eval_map_Mask": 0.4354, "eval_map_large": 0.6585, "eval_map_medium": 0.3374, "eval_map_small": 0.375, "eval_mar_1": 0.3528, "eval_mar_10": 0.5919, "eval_mar_100": 0.6039, "eval_mar_100_Coverall": 0.7756, "eval_mar_100_Face_Shield": 0.6765, "eval_mar_100_Gloves": 0.518, "eval_mar_100_Goggles": 0.5188, "eval_mar_100_Mask": 0.5308, "eval_mar_large": 0.7989, "eval_mar_medium": 0.5198, "eval_mar_small": 0.423, "eval_runtime": 0.9153, "eval_samples_per_second": 31.683, "eval_steps_per_second": 2.185, "step": 10500 }, { "epoch": 84.008, "grad_norm": 25.497661590576172, "learning_rate": 8.924444444444444e-06, "loss": 1.3653, "step": 10501 }, { "epoch": 84.016, "grad_norm": 25.686067581176758, "learning_rate": 8.920000000000001e-06, "loss": 0.878, "step": 10502 }, { "epoch": 84.024, "grad_norm": 46.29133987426758, 
"learning_rate": 8.915555555555556e-06, "loss": 0.7617, "step": 10503 }, { "epoch": 84.032, "grad_norm": 17.99452018737793, "learning_rate": 8.911111111111112e-06, "loss": 0.6102, "step": 10504 }, { "epoch": 84.04, "grad_norm": 30.54508399963379, "learning_rate": 8.906666666666667e-06, "loss": 0.9317, "step": 10505 }, { "epoch": 84.048, "grad_norm": 18.238433837890625, "learning_rate": 8.902222222222222e-06, "loss": 1.061, "step": 10506 }, { "epoch": 84.056, "grad_norm": 17.612218856811523, "learning_rate": 8.897777777777779e-06, "loss": 0.73, "step": 10507 }, { "epoch": 84.064, "grad_norm": 25.301307678222656, "learning_rate": 8.893333333333333e-06, "loss": 0.8742, "step": 10508 }, { "epoch": 84.072, "grad_norm": 19.632802963256836, "learning_rate": 8.88888888888889e-06, "loss": 0.8883, "step": 10509 }, { "epoch": 84.08, "grad_norm": 23.74407958984375, "learning_rate": 8.884444444444445e-06, "loss": 0.6471, "step": 10510 }, { "epoch": 84.088, "grad_norm": 37.50954818725586, "learning_rate": 8.880000000000001e-06, "loss": 1.0241, "step": 10511 }, { "epoch": 84.096, "grad_norm": 24.98404312133789, "learning_rate": 8.875555555555556e-06, "loss": 0.8382, "step": 10512 }, { "epoch": 84.104, "grad_norm": 10.143538475036621, "learning_rate": 8.871111111111113e-06, "loss": 0.7311, "step": 10513 }, { "epoch": 84.112, "grad_norm": 69.93220520019531, "learning_rate": 8.866666666666668e-06, "loss": 0.5536, "step": 10514 }, { "epoch": 84.12, "grad_norm": 22.642784118652344, "learning_rate": 8.862222222222222e-06, "loss": 0.9935, "step": 10515 }, { "epoch": 84.128, "grad_norm": 22.120624542236328, "learning_rate": 8.857777777777779e-06, "loss": 0.9277, "step": 10516 }, { "epoch": 84.136, "grad_norm": 38.495479583740234, "learning_rate": 8.853333333333334e-06, "loss": 0.726, "step": 10517 }, { "epoch": 84.144, "grad_norm": 91.57625579833984, "learning_rate": 8.84888888888889e-06, "loss": 1.0098, "step": 10518 }, { "epoch": 84.152, "grad_norm": 29.77353858947754, "learning_rate": 
8.844444444444445e-06, "loss": 0.9945, "step": 10519 }, { "epoch": 84.16, "grad_norm": 20.89712905883789, "learning_rate": 8.840000000000002e-06, "loss": 0.7551, "step": 10520 }, { "epoch": 84.168, "grad_norm": 19.171977996826172, "learning_rate": 8.835555555555557e-06, "loss": 0.6719, "step": 10521 }, { "epoch": 84.176, "grad_norm": 38.6698112487793, "learning_rate": 8.831111111111111e-06, "loss": 1.1526, "step": 10522 }, { "epoch": 84.184, "grad_norm": 40.3394775390625, "learning_rate": 8.826666666666666e-06, "loss": 0.953, "step": 10523 }, { "epoch": 84.192, "grad_norm": 36.65724563598633, "learning_rate": 8.822222222222223e-06, "loss": 1.0456, "step": 10524 }, { "epoch": 84.2, "grad_norm": 31.752479553222656, "learning_rate": 8.817777777777778e-06, "loss": 0.8574, "step": 10525 }, { "epoch": 84.208, "grad_norm": 24.398277282714844, "learning_rate": 8.813333333333333e-06, "loss": 0.9294, "step": 10526 }, { "epoch": 84.216, "grad_norm": 27.77591896057129, "learning_rate": 8.808888888888889e-06, "loss": 1.1465, "step": 10527 }, { "epoch": 84.224, "grad_norm": 17.32582664489746, "learning_rate": 8.804444444444444e-06, "loss": 0.7569, "step": 10528 }, { "epoch": 84.232, "grad_norm": 15.742376327514648, "learning_rate": 8.8e-06, "loss": 0.8182, "step": 10529 }, { "epoch": 84.24, "grad_norm": 20.28227996826172, "learning_rate": 8.795555555555555e-06, "loss": 0.8732, "step": 10530 }, { "epoch": 84.248, "grad_norm": 27.381467819213867, "learning_rate": 8.791111111111112e-06, "loss": 1.0388, "step": 10531 }, { "epoch": 84.256, "grad_norm": 52.06999588012695, "learning_rate": 8.786666666666667e-06, "loss": 1.375, "step": 10532 }, { "epoch": 84.264, "grad_norm": 15.330655097961426, "learning_rate": 8.782222222222222e-06, "loss": 0.6053, "step": 10533 }, { "epoch": 84.272, "grad_norm": 23.12376594543457, "learning_rate": 8.777777777777778e-06, "loss": 0.6518, "step": 10534 }, { "epoch": 84.28, "grad_norm": 39.496482849121094, "learning_rate": 8.773333333333333e-06, "loss": 
0.7321, "step": 10535 }, { "epoch": 84.288, "grad_norm": 14.61496639251709, "learning_rate": 8.76888888888889e-06, "loss": 0.7507, "step": 10536 }, { "epoch": 84.296, "grad_norm": 16.13677215576172, "learning_rate": 8.764444444444444e-06, "loss": 1.0428, "step": 10537 }, { "epoch": 84.304, "grad_norm": 16.7780704498291, "learning_rate": 8.76e-06, "loss": 0.7176, "step": 10538 }, { "epoch": 84.312, "grad_norm": 40.3660888671875, "learning_rate": 8.755555555555556e-06, "loss": 0.6205, "step": 10539 }, { "epoch": 84.32, "grad_norm": 26.620214462280273, "learning_rate": 8.75111111111111e-06, "loss": 0.9543, "step": 10540 }, { "epoch": 84.328, "grad_norm": 24.449920654296875, "learning_rate": 8.746666666666667e-06, "loss": 0.7002, "step": 10541 }, { "epoch": 84.336, "grad_norm": 29.736047744750977, "learning_rate": 8.742222222222222e-06, "loss": 0.6012, "step": 10542 }, { "epoch": 84.344, "grad_norm": 25.04981231689453, "learning_rate": 8.737777777777778e-06, "loss": 0.9277, "step": 10543 }, { "epoch": 84.352, "grad_norm": 92.80799865722656, "learning_rate": 8.733333333333333e-06, "loss": 0.8757, "step": 10544 }, { "epoch": 84.36, "grad_norm": 27.31580352783203, "learning_rate": 8.72888888888889e-06, "loss": 0.7244, "step": 10545 }, { "epoch": 84.368, "grad_norm": 20.33370018005371, "learning_rate": 8.724444444444445e-06, "loss": 0.8428, "step": 10546 }, { "epoch": 84.376, "grad_norm": 17.366195678710938, "learning_rate": 8.720000000000001e-06, "loss": 0.8152, "step": 10547 }, { "epoch": 84.384, "grad_norm": 67.0474624633789, "learning_rate": 8.715555555555556e-06, "loss": 0.8046, "step": 10548 }, { "epoch": 84.392, "grad_norm": 29.917081832885742, "learning_rate": 8.711111111111111e-06, "loss": 0.8846, "step": 10549 }, { "epoch": 84.4, "grad_norm": 46.5757942199707, "learning_rate": 8.706666666666667e-06, "loss": 0.9506, "step": 10550 }, { "epoch": 84.408, "grad_norm": 35.32502365112305, "learning_rate": 8.702222222222222e-06, "loss": 0.9401, "step": 10551 }, { 
"epoch": 84.416, "grad_norm": 14.246539115905762, "learning_rate": 8.697777777777779e-06, "loss": 1.016, "step": 10552 }, { "epoch": 84.424, "grad_norm": 18.507888793945312, "learning_rate": 8.693333333333334e-06, "loss": 0.9061, "step": 10553 }, { "epoch": 84.432, "grad_norm": 19.949359893798828, "learning_rate": 8.68888888888889e-06, "loss": 0.8586, "step": 10554 }, { "epoch": 84.44, "grad_norm": 23.67621612548828, "learning_rate": 8.684444444444445e-06, "loss": 0.8776, "step": 10555 }, { "epoch": 84.448, "grad_norm": 13.61921215057373, "learning_rate": 8.68e-06, "loss": 1.1008, "step": 10556 }, { "epoch": 84.456, "grad_norm": 45.877899169921875, "learning_rate": 8.675555555555556e-06, "loss": 0.6451, "step": 10557 }, { "epoch": 84.464, "grad_norm": 46.4353141784668, "learning_rate": 8.671111111111111e-06, "loss": 0.712, "step": 10558 }, { "epoch": 84.472, "grad_norm": 48.82621383666992, "learning_rate": 8.666666666666668e-06, "loss": 0.6083, "step": 10559 }, { "epoch": 84.48, "grad_norm": 20.831335067749023, "learning_rate": 8.662222222222223e-06, "loss": 1.6578, "step": 10560 }, { "epoch": 84.488, "grad_norm": 20.371501922607422, "learning_rate": 8.65777777777778e-06, "loss": 0.7265, "step": 10561 }, { "epoch": 84.496, "grad_norm": 22.62664794921875, "learning_rate": 8.653333333333334e-06, "loss": 0.7824, "step": 10562 }, { "epoch": 84.504, "grad_norm": 23.102022171020508, "learning_rate": 8.648888888888889e-06, "loss": 0.6354, "step": 10563 }, { "epoch": 84.512, "grad_norm": 56.52484130859375, "learning_rate": 8.644444444444445e-06, "loss": 0.7423, "step": 10564 }, { "epoch": 84.52, "grad_norm": 26.82114028930664, "learning_rate": 8.64e-06, "loss": 1.2034, "step": 10565 }, { "epoch": 84.528, "grad_norm": 18.072778701782227, "learning_rate": 8.635555555555557e-06, "loss": 0.736, "step": 10566 }, { "epoch": 84.536, "grad_norm": 16.800989151000977, "learning_rate": 8.631111111111112e-06, "loss": 0.6391, "step": 10567 }, { "epoch": 84.544, "grad_norm": 
22.931293487548828, "learning_rate": 8.626666666666668e-06, "loss": 0.963, "step": 10568 }, { "epoch": 84.552, "grad_norm": 54.993812561035156, "learning_rate": 8.622222222222223e-06, "loss": 0.5408, "step": 10569 }, { "epoch": 84.56, "grad_norm": 17.977432250976562, "learning_rate": 8.61777777777778e-06, "loss": 0.6712, "step": 10570 }, { "epoch": 84.568, "grad_norm": 25.16407585144043, "learning_rate": 8.613333333333334e-06, "loss": 0.7717, "step": 10571 }, { "epoch": 84.576, "grad_norm": 30.566030502319336, "learning_rate": 8.60888888888889e-06, "loss": 2.6244, "step": 10572 }, { "epoch": 84.584, "grad_norm": 15.03775691986084, "learning_rate": 8.604444444444446e-06, "loss": 0.5805, "step": 10573 }, { "epoch": 84.592, "grad_norm": 24.236356735229492, "learning_rate": 8.599999999999999e-06, "loss": 0.5859, "step": 10574 }, { "epoch": 84.6, "grad_norm": 30.60033416748047, "learning_rate": 8.595555555555556e-06, "loss": 1.4695, "step": 10575 }, { "epoch": 84.608, "grad_norm": 28.94411849975586, "learning_rate": 8.59111111111111e-06, "loss": 0.5868, "step": 10576 }, { "epoch": 84.616, "grad_norm": 38.69142532348633, "learning_rate": 8.586666666666667e-06, "loss": 0.8482, "step": 10577 }, { "epoch": 84.624, "grad_norm": 14.294096946716309, "learning_rate": 8.582222222222222e-06, "loss": 1.0222, "step": 10578 }, { "epoch": 84.632, "grad_norm": 22.55918312072754, "learning_rate": 8.577777777777778e-06, "loss": 0.5779, "step": 10579 }, { "epoch": 84.64, "grad_norm": 32.46064376831055, "learning_rate": 8.573333333333333e-06, "loss": 1.8116, "step": 10580 }, { "epoch": 84.648, "grad_norm": 14.026567459106445, "learning_rate": 8.568888888888888e-06, "loss": 0.7398, "step": 10581 }, { "epoch": 84.656, "grad_norm": 49.91154861450195, "learning_rate": 8.564444444444445e-06, "loss": 1.1613, "step": 10582 }, { "epoch": 84.664, "grad_norm": 24.899242401123047, "learning_rate": 8.56e-06, "loss": 0.4494, "step": 10583 }, { "epoch": 84.672, "grad_norm": 17.80312156677246, 
"learning_rate": 8.555555555555556e-06, "loss": 0.8474, "step": 10584 }, { "epoch": 84.68, "grad_norm": 12.968093872070312, "learning_rate": 8.55111111111111e-06, "loss": 0.8474, "step": 10585 }, { "epoch": 84.688, "grad_norm": 25.15688133239746, "learning_rate": 8.546666666666667e-06, "loss": 0.9381, "step": 10586 }, { "epoch": 84.696, "grad_norm": 14.798428535461426, "learning_rate": 8.542222222222222e-06, "loss": 0.721, "step": 10587 }, { "epoch": 84.704, "grad_norm": 23.462997436523438, "learning_rate": 8.537777777777779e-06, "loss": 0.4936, "step": 10588 }, { "epoch": 84.712, "grad_norm": 22.12965965270996, "learning_rate": 8.533333333333334e-06, "loss": 0.7569, "step": 10589 }, { "epoch": 84.72, "grad_norm": 21.18203353881836, "learning_rate": 8.528888888888888e-06, "loss": 0.7925, "step": 10590 }, { "epoch": 84.728, "grad_norm": 22.495849609375, "learning_rate": 8.524444444444445e-06, "loss": 1.1293, "step": 10591 }, { "epoch": 84.736, "grad_norm": 41.26902389526367, "learning_rate": 8.52e-06, "loss": 0.6593, "step": 10592 }, { "epoch": 84.744, "grad_norm": 103.54942321777344, "learning_rate": 8.515555555555556e-06, "loss": 0.8855, "step": 10593 }, { "epoch": 84.752, "grad_norm": 30.067602157592773, "learning_rate": 8.511111111111111e-06, "loss": 0.5561, "step": 10594 }, { "epoch": 84.76, "grad_norm": 43.956756591796875, "learning_rate": 8.506666666666668e-06, "loss": 0.6327, "step": 10595 }, { "epoch": 84.768, "grad_norm": 16.92805290222168, "learning_rate": 8.502222222222223e-06, "loss": 0.6121, "step": 10596 }, { "epoch": 84.776, "grad_norm": 56.22417068481445, "learning_rate": 8.497777777777777e-06, "loss": 0.7681, "step": 10597 }, { "epoch": 84.784, "grad_norm": 30.27033233642578, "learning_rate": 8.493333333333334e-06, "loss": 0.9781, "step": 10598 }, { "epoch": 84.792, "grad_norm": 44.68722915649414, "learning_rate": 8.488888888888889e-06, "loss": 0.9454, "step": 10599 }, { "epoch": 84.8, "grad_norm": 23.892438888549805, "learning_rate": 
8.484444444444445e-06, "loss": 0.6124, "step": 10600 }, { "epoch": 84.808, "grad_norm": 24.863161087036133, "learning_rate": 8.48e-06, "loss": 0.8946, "step": 10601 }, { "epoch": 84.816, "grad_norm": 302.8157043457031, "learning_rate": 8.475555555555557e-06, "loss": 1.7996, "step": 10602 }, { "epoch": 84.824, "grad_norm": 15.05617618560791, "learning_rate": 8.471111111111112e-06, "loss": 1.0756, "step": 10603 }, { "epoch": 84.832, "grad_norm": 19.71489143371582, "learning_rate": 8.466666666666666e-06, "loss": 0.6873, "step": 10604 }, { "epoch": 84.84, "grad_norm": 27.640390396118164, "learning_rate": 8.462222222222223e-06, "loss": 2.2999, "step": 10605 }, { "epoch": 84.848, "grad_norm": 480.19366455078125, "learning_rate": 8.457777777777778e-06, "loss": 0.7592, "step": 10606 }, { "epoch": 84.856, "grad_norm": 18.870153427124023, "learning_rate": 8.453333333333334e-06, "loss": 0.719, "step": 10607 }, { "epoch": 84.864, "grad_norm": 24.14615821838379, "learning_rate": 8.448888888888889e-06, "loss": 0.6496, "step": 10608 }, { "epoch": 84.872, "grad_norm": 20.721351623535156, "learning_rate": 8.444444444444446e-06, "loss": 0.756, "step": 10609 }, { "epoch": 84.88, "grad_norm": 19.654136657714844, "learning_rate": 8.44e-06, "loss": 0.8441, "step": 10610 }, { "epoch": 84.888, "grad_norm": 26.895130157470703, "learning_rate": 8.435555555555557e-06, "loss": 0.9052, "step": 10611 }, { "epoch": 84.896, "grad_norm": 23.88959503173828, "learning_rate": 8.431111111111112e-06, "loss": 0.7868, "step": 10612 }, { "epoch": 84.904, "grad_norm": 19.655473709106445, "learning_rate": 8.426666666666667e-06, "loss": 0.6511, "step": 10613 }, { "epoch": 84.912, "grad_norm": 18.50193214416504, "learning_rate": 8.422222222222223e-06, "loss": 0.6489, "step": 10614 }, { "epoch": 84.92, "grad_norm": 42.23824691772461, "learning_rate": 8.417777777777778e-06, "loss": 0.9526, "step": 10615 }, { "epoch": 84.928, "grad_norm": 124.29719543457031, "learning_rate": 8.413333333333335e-06, "loss": 
0.7847, "step": 10616 }, { "epoch": 84.936, "grad_norm": 14.003665924072266, "learning_rate": 8.40888888888889e-06, "loss": 1.2346, "step": 10617 }, { "epoch": 84.944, "grad_norm": 18.589332580566406, "learning_rate": 8.404444444444446e-06, "loss": 0.7709, "step": 10618 }, { "epoch": 84.952, "grad_norm": 19.595067977905273, "learning_rate": 8.400000000000001e-06, "loss": 0.9345, "step": 10619 }, { "epoch": 84.96, "grad_norm": 22.042417526245117, "learning_rate": 8.395555555555556e-06, "loss": 0.851, "step": 10620 }, { "epoch": 84.968, "grad_norm": 106.26229095458984, "learning_rate": 8.391111111111112e-06, "loss": 0.8961, "step": 10621 }, { "epoch": 84.976, "grad_norm": 13.13810920715332, "learning_rate": 8.386666666666667e-06, "loss": 0.8036, "step": 10622 }, { "epoch": 84.984, "grad_norm": 18.695173263549805, "learning_rate": 8.382222222222224e-06, "loss": 0.9583, "step": 10623 }, { "epoch": 84.992, "grad_norm": 22.477794647216797, "learning_rate": 8.377777777777779e-06, "loss": 0.7942, "step": 10624 }, { "epoch": 85.0, "grad_norm": 16.917558670043945, "learning_rate": 8.373333333333335e-06, "loss": 0.8598, "step": 10625 }, { "epoch": 85.0, "eval_loss": 0.9285418391227722, "eval_map": 0.4628, "eval_map_50": 0.7908, "eval_map_75": 0.5246, "eval_map_Coverall": 0.6849, "eval_map_Face_Shield": 0.5875, "eval_map_Gloves": 0.366, "eval_map_Goggles": 0.2615, "eval_map_Mask": 0.4139, "eval_map_large": 0.6574, "eval_map_medium": 0.3396, "eval_map_small": 0.3368, "eval_mar_1": 0.355, "eval_mar_10": 0.5945, "eval_mar_100": 0.6038, "eval_mar_100_Coverall": 0.7778, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.5115, "eval_mar_100_Goggles": 0.5219, "eval_mar_100_Mask": 0.4904, "eval_mar_large": 0.7854, "eval_mar_medium": 0.5122, "eval_mar_small": 0.39, "eval_runtime": 0.9149, "eval_samples_per_second": 31.699, "eval_steps_per_second": 2.186, "step": 10625 }, { "epoch": 85.008, "grad_norm": 14.734566688537598, "learning_rate": 8.368888888888888e-06, "loss": 
0.8236, "step": 10626 }, { "epoch": 85.016, "grad_norm": 23.112573623657227, "learning_rate": 8.364444444444445e-06, "loss": 0.777, "step": 10627 }, { "epoch": 85.024, "grad_norm": 19.9703311920166, "learning_rate": 8.36e-06, "loss": 0.7691, "step": 10628 }, { "epoch": 85.032, "grad_norm": 22.05351448059082, "learning_rate": 8.355555555555556e-06, "loss": 0.9508, "step": 10629 }, { "epoch": 85.04, "grad_norm": 34.14179229736328, "learning_rate": 8.351111111111111e-06, "loss": 1.3682, "step": 10630 }, { "epoch": 85.048, "grad_norm": 13.71300220489502, "learning_rate": 8.346666666666666e-06, "loss": 0.5758, "step": 10631 }, { "epoch": 85.056, "grad_norm": 41.214656829833984, "learning_rate": 8.342222222222222e-06, "loss": 0.8044, "step": 10632 }, { "epoch": 85.064, "grad_norm": 15.755704879760742, "learning_rate": 8.337777777777777e-06, "loss": 1.1369, "step": 10633 }, { "epoch": 85.072, "grad_norm": 16.802743911743164, "learning_rate": 8.333333333333334e-06, "loss": 0.9056, "step": 10634 }, { "epoch": 85.08, "grad_norm": 31.657012939453125, "learning_rate": 8.328888888888889e-06, "loss": 0.8644, "step": 10635 }, { "epoch": 85.088, "grad_norm": 23.626867294311523, "learning_rate": 8.324444444444445e-06, "loss": 0.8538, "step": 10636 }, { "epoch": 85.096, "grad_norm": 127.2234878540039, "learning_rate": 8.32e-06, "loss": 0.8842, "step": 10637 }, { "epoch": 85.104, "grad_norm": 26.282981872558594, "learning_rate": 8.315555555555555e-06, "loss": 1.555, "step": 10638 }, { "epoch": 85.112, "grad_norm": 52.163639068603516, "learning_rate": 8.311111111111111e-06, "loss": 0.9821, "step": 10639 }, { "epoch": 85.12, "grad_norm": 21.539278030395508, "learning_rate": 8.306666666666666e-06, "loss": 0.9124, "step": 10640 }, { "epoch": 85.128, "grad_norm": 44.12542724609375, "learning_rate": 8.302222222222223e-06, "loss": 1.1286, "step": 10641 }, { "epoch": 85.136, "grad_norm": 35.856632232666016, "learning_rate": 8.297777777777778e-06, "loss": 1.0744, "step": 10642 }, { "epoch": 
85.144, "grad_norm": 21.3373966217041, "learning_rate": 8.293333333333334e-06, "loss": 1.6626, "step": 10643 }, { "epoch": 85.152, "grad_norm": 23.111330032348633, "learning_rate": 8.288888888888889e-06, "loss": 0.8112, "step": 10644 }, { "epoch": 85.16, "grad_norm": 20.862428665161133, "learning_rate": 8.284444444444446e-06, "loss": 0.869, "step": 10645 }, { "epoch": 85.168, "grad_norm": 29.834732055664062, "learning_rate": 8.28e-06, "loss": 0.8228, "step": 10646 }, { "epoch": 85.176, "grad_norm": 23.704618453979492, "learning_rate": 8.275555555555555e-06, "loss": 0.6718, "step": 10647 }, { "epoch": 85.184, "grad_norm": 21.566455841064453, "learning_rate": 8.271111111111112e-06, "loss": 0.9826, "step": 10648 }, { "epoch": 85.192, "grad_norm": 17.414443969726562, "learning_rate": 8.266666666666667e-06, "loss": 0.7452, "step": 10649 }, { "epoch": 85.2, "grad_norm": 16.33457374572754, "learning_rate": 8.262222222222223e-06, "loss": 1.1751, "step": 10650 }, { "epoch": 85.208, "grad_norm": 23.774335861206055, "learning_rate": 8.257777777777778e-06, "loss": 0.5879, "step": 10651 }, { "epoch": 85.216, "grad_norm": 41.775245666503906, "learning_rate": 8.253333333333334e-06, "loss": 0.9057, "step": 10652 }, { "epoch": 85.224, "grad_norm": 36.18773651123047, "learning_rate": 8.24888888888889e-06, "loss": 1.0302, "step": 10653 }, { "epoch": 85.232, "grad_norm": 48.77586364746094, "learning_rate": 8.244444444444444e-06, "loss": 0.9365, "step": 10654 }, { "epoch": 85.24, "grad_norm": 17.659351348876953, "learning_rate": 8.24e-06, "loss": 0.8013, "step": 10655 }, { "epoch": 85.248, "grad_norm": 23.787874221801758, "learning_rate": 8.235555555555556e-06, "loss": 0.6252, "step": 10656 }, { "epoch": 85.256, "grad_norm": 20.6852970123291, "learning_rate": 8.231111111111112e-06, "loss": 0.7565, "step": 10657 }, { "epoch": 85.264, "grad_norm": 23.26120376586914, "learning_rate": 8.226666666666667e-06, "loss": 0.7207, "step": 10658 }, { "epoch": 85.272, "grad_norm": 29.20979118347168, 
"learning_rate": 8.222222222222223e-06, "loss": 0.9257, "step": 10659 }, { "epoch": 85.28, "grad_norm": 16.34465217590332, "learning_rate": 8.217777777777778e-06, "loss": 0.8321, "step": 10660 }, { "epoch": 85.288, "grad_norm": 30.587312698364258, "learning_rate": 8.213333333333333e-06, "loss": 1.0828, "step": 10661 }, { "epoch": 85.296, "grad_norm": 23.13964080810547, "learning_rate": 8.20888888888889e-06, "loss": 0.9666, "step": 10662 }, { "epoch": 85.304, "grad_norm": 23.070236206054688, "learning_rate": 8.204444444444445e-06, "loss": 0.9117, "step": 10663 }, { "epoch": 85.312, "grad_norm": 21.581695556640625, "learning_rate": 8.200000000000001e-06, "loss": 0.8982, "step": 10664 }, { "epoch": 85.32, "grad_norm": 51.960086822509766, "learning_rate": 8.195555555555556e-06, "loss": 0.6007, "step": 10665 }, { "epoch": 85.328, "grad_norm": 29.217653274536133, "learning_rate": 8.191111111111112e-06, "loss": 0.7873, "step": 10666 }, { "epoch": 85.336, "grad_norm": 16.873437881469727, "learning_rate": 8.186666666666667e-06, "loss": 0.5701, "step": 10667 }, { "epoch": 85.344, "grad_norm": 15.864097595214844, "learning_rate": 8.182222222222224e-06, "loss": 0.5359, "step": 10668 }, { "epoch": 85.352, "grad_norm": 27.525754928588867, "learning_rate": 8.177777777777779e-06, "loss": 0.5832, "step": 10669 }, { "epoch": 85.36, "grad_norm": 105.7946548461914, "learning_rate": 8.173333333333334e-06, "loss": 1.1058, "step": 10670 }, { "epoch": 85.368, "grad_norm": 32.8873291015625, "learning_rate": 8.16888888888889e-06, "loss": 0.8864, "step": 10671 }, { "epoch": 85.376, "grad_norm": 14.384588241577148, "learning_rate": 8.164444444444445e-06, "loss": 0.6653, "step": 10672 }, { "epoch": 85.384, "grad_norm": 51.88847351074219, "learning_rate": 8.160000000000001e-06, "loss": 0.7608, "step": 10673 }, { "epoch": 85.392, "grad_norm": 22.79644203186035, "learning_rate": 8.155555555555556e-06, "loss": 0.7358, "step": 10674 }, { "epoch": 85.4, "grad_norm": 17.14124298095703, 
"learning_rate": 8.151111111111113e-06, "loss": 0.9218, "step": 10675 }, { "epoch": 85.408, "grad_norm": 21.04739761352539, "learning_rate": 8.146666666666668e-06, "loss": 0.717, "step": 10676 }, { "epoch": 85.416, "grad_norm": 32.7950439453125, "learning_rate": 8.142222222222223e-06, "loss": 1.2493, "step": 10677 }, { "epoch": 85.424, "grad_norm": 23.843502044677734, "learning_rate": 8.137777777777777e-06, "loss": 0.8761, "step": 10678 }, { "epoch": 85.432, "grad_norm": 15.594465255737305, "learning_rate": 8.133333333333332e-06, "loss": 0.8359, "step": 10679 }, { "epoch": 85.44, "grad_norm": 55.37944412231445, "learning_rate": 8.128888888888889e-06, "loss": 0.6021, "step": 10680 }, { "epoch": 85.448, "grad_norm": 17.148475646972656, "learning_rate": 8.124444444444444e-06, "loss": 1.028, "step": 10681 }, { "epoch": 85.456, "grad_norm": 18.78461265563965, "learning_rate": 8.12e-06, "loss": 1.0017, "step": 10682 }, { "epoch": 85.464, "grad_norm": 14.87514591217041, "learning_rate": 8.115555555555555e-06, "loss": 0.8832, "step": 10683 }, { "epoch": 85.472, "grad_norm": 28.659595489501953, "learning_rate": 8.111111111111112e-06, "loss": 0.9658, "step": 10684 }, { "epoch": 85.48, "grad_norm": 36.136383056640625, "learning_rate": 8.106666666666666e-06, "loss": 0.8964, "step": 10685 }, { "epoch": 85.488, "grad_norm": 14.258651733398438, "learning_rate": 8.102222222222223e-06, "loss": 0.7618, "step": 10686 }, { "epoch": 85.496, "grad_norm": 30.21490478515625, "learning_rate": 8.097777777777778e-06, "loss": 0.8353, "step": 10687 }, { "epoch": 85.504, "grad_norm": 25.23017120361328, "learning_rate": 8.093333333333333e-06, "loss": 1.3199, "step": 10688 }, { "epoch": 85.512, "grad_norm": 34.116249084472656, "learning_rate": 8.08888888888889e-06, "loss": 0.8399, "step": 10689 }, { "epoch": 85.52, "grad_norm": 29.30133819580078, "learning_rate": 8.084444444444444e-06, "loss": 0.8726, "step": 10690 }, { "epoch": 85.528, "grad_norm": 39.28391647338867, "learning_rate": 8.08e-06, 
"loss": 0.9231, "step": 10691 }, { "epoch": 85.536, "grad_norm": 15.052464485168457, "learning_rate": 8.075555555555555e-06, "loss": 0.8629, "step": 10692 }, { "epoch": 85.544, "grad_norm": 18.756664276123047, "learning_rate": 8.071111111111112e-06, "loss": 0.6516, "step": 10693 }, { "epoch": 85.552, "grad_norm": 16.58112335205078, "learning_rate": 8.066666666666667e-06, "loss": 0.67, "step": 10694 }, { "epoch": 85.56, "grad_norm": 32.38840103149414, "learning_rate": 8.062222222222222e-06, "loss": 2.2626, "step": 10695 }, { "epoch": 85.568, "grad_norm": 16.831336975097656, "learning_rate": 8.057777777777778e-06, "loss": 1.4542, "step": 10696 }, { "epoch": 85.576, "grad_norm": 25.586994171142578, "learning_rate": 8.053333333333333e-06, "loss": 0.8018, "step": 10697 }, { "epoch": 85.584, "grad_norm": 14.696053504943848, "learning_rate": 8.04888888888889e-06, "loss": 0.8464, "step": 10698 }, { "epoch": 85.592, "grad_norm": 22.17346954345703, "learning_rate": 8.044444444444444e-06, "loss": 0.8874, "step": 10699 }, { "epoch": 85.6, "grad_norm": 31.587636947631836, "learning_rate": 8.040000000000001e-06, "loss": 0.5521, "step": 10700 }, { "epoch": 85.608, "grad_norm": 29.730024337768555, "learning_rate": 8.035555555555556e-06, "loss": 0.8906, "step": 10701 }, { "epoch": 85.616, "grad_norm": 21.99225616455078, "learning_rate": 8.03111111111111e-06, "loss": 0.8499, "step": 10702 }, { "epoch": 85.624, "grad_norm": 43.33137130737305, "learning_rate": 8.026666666666667e-06, "loss": 0.8482, "step": 10703 }, { "epoch": 85.632, "grad_norm": 34.22710418701172, "learning_rate": 8.022222222222222e-06, "loss": 0.9921, "step": 10704 }, { "epoch": 85.64, "grad_norm": 23.65632438659668, "learning_rate": 8.017777777777779e-06, "loss": 0.6654, "step": 10705 }, { "epoch": 85.648, "grad_norm": 21.367238998413086, "learning_rate": 8.013333333333333e-06, "loss": 0.5288, "step": 10706 }, { "epoch": 85.656, "grad_norm": 38.2912483215332, "learning_rate": 8.00888888888889e-06, "loss": 1.0764, 
"step": 10707 }, { "epoch": 85.664, "grad_norm": 126.67219543457031, "learning_rate": 8.004444444444445e-06, "loss": 0.718, "step": 10708 }, { "epoch": 85.672, "grad_norm": 27.056747436523438, "learning_rate": 8.000000000000001e-06, "loss": 0.585, "step": 10709 }, { "epoch": 85.68, "grad_norm": 16.495655059814453, "learning_rate": 7.995555555555556e-06, "loss": 0.6281, "step": 10710 }, { "epoch": 85.688, "grad_norm": 22.95977783203125, "learning_rate": 7.991111111111111e-06, "loss": 1.121, "step": 10711 }, { "epoch": 85.696, "grad_norm": 32.395362854003906, "learning_rate": 7.986666666666668e-06, "loss": 0.9199, "step": 10712 }, { "epoch": 85.704, "grad_norm": 20.923171997070312, "learning_rate": 7.982222222222222e-06, "loss": 0.7929, "step": 10713 }, { "epoch": 85.712, "grad_norm": 22.52201271057129, "learning_rate": 7.977777777777779e-06, "loss": 0.5572, "step": 10714 }, { "epoch": 85.72, "grad_norm": 22.046173095703125, "learning_rate": 7.973333333333334e-06, "loss": 0.8027, "step": 10715 }, { "epoch": 85.728, "grad_norm": 32.16286087036133, "learning_rate": 7.96888888888889e-06, "loss": 0.6966, "step": 10716 }, { "epoch": 85.736, "grad_norm": 22.684011459350586, "learning_rate": 7.964444444444445e-06, "loss": 0.8127, "step": 10717 }, { "epoch": 85.744, "grad_norm": 14.229876518249512, "learning_rate": 7.96e-06, "loss": 0.6269, "step": 10718 }, { "epoch": 85.752, "grad_norm": 21.752159118652344, "learning_rate": 7.955555555555557e-06, "loss": 0.6699, "step": 10719 }, { "epoch": 85.76, "grad_norm": 20.231796264648438, "learning_rate": 7.951111111111111e-06, "loss": 0.7433, "step": 10720 }, { "epoch": 85.768, "grad_norm": 63.96038818359375, "learning_rate": 7.946666666666668e-06, "loss": 0.5958, "step": 10721 }, { "epoch": 85.776, "grad_norm": 19.9946346282959, "learning_rate": 7.942222222222223e-06, "loss": 2.0259, "step": 10722 }, { "epoch": 85.784, "grad_norm": 37.340087890625, "learning_rate": 7.93777777777778e-06, "loss": 0.7312, "step": 10723 }, { "epoch": 
85.792, "grad_norm": 21.353443145751953, "learning_rate": 7.933333333333334e-06, "loss": 0.938, "step": 10724 }, { "epoch": 85.8, "grad_norm": 10.118407249450684, "learning_rate": 7.928888888888889e-06, "loss": 0.6767, "step": 10725 }, { "epoch": 85.808, "grad_norm": 23.92495346069336, "learning_rate": 7.924444444444446e-06, "loss": 0.9154, "step": 10726 }, { "epoch": 85.816, "grad_norm": 20.292959213256836, "learning_rate": 7.92e-06, "loss": 0.68, "step": 10727 }, { "epoch": 85.824, "grad_norm": 22.024879455566406, "learning_rate": 7.915555555555557e-06, "loss": 2.6426, "step": 10728 }, { "epoch": 85.832, "grad_norm": 17.45902442932129, "learning_rate": 7.91111111111111e-06, "loss": 0.6583, "step": 10729 }, { "epoch": 85.84, "grad_norm": 18.48320960998535, "learning_rate": 7.906666666666667e-06, "loss": 1.0211, "step": 10730 }, { "epoch": 85.848, "grad_norm": 96.25332641601562, "learning_rate": 7.902222222222221e-06, "loss": 0.8236, "step": 10731 }, { "epoch": 85.856, "grad_norm": 25.638534545898438, "learning_rate": 7.897777777777778e-06, "loss": 0.4744, "step": 10732 }, { "epoch": 85.864, "grad_norm": 28.02132225036621, "learning_rate": 7.893333333333333e-06, "loss": 0.8992, "step": 10733 }, { "epoch": 85.872, "grad_norm": 28.524303436279297, "learning_rate": 7.88888888888889e-06, "loss": 0.9089, "step": 10734 }, { "epoch": 85.88, "grad_norm": 19.67633819580078, "learning_rate": 7.884444444444444e-06, "loss": 0.9694, "step": 10735 }, { "epoch": 85.888, "grad_norm": 23.843538284301758, "learning_rate": 7.879999999999999e-06, "loss": 0.7861, "step": 10736 }, { "epoch": 85.896, "grad_norm": 25.825244903564453, "learning_rate": 7.875555555555556e-06, "loss": 0.9806, "step": 10737 }, { "epoch": 85.904, "grad_norm": 25.20811653137207, "learning_rate": 7.87111111111111e-06, "loss": 0.5842, "step": 10738 }, { "epoch": 85.912, "grad_norm": 31.57871437072754, "learning_rate": 7.866666666666667e-06, "loss": 0.6288, "step": 10739 }, { "epoch": 85.92, "grad_norm": 
30.42147445678711, "learning_rate": 7.862222222222222e-06, "loss": 1.0939, "step": 10740 }, { "epoch": 85.928, "grad_norm": 16.096696853637695, "learning_rate": 7.857777777777778e-06, "loss": 0.923, "step": 10741 }, { "epoch": 85.936, "grad_norm": 39.37200927734375, "learning_rate": 7.853333333333333e-06, "loss": 0.8981, "step": 10742 }, { "epoch": 85.944, "grad_norm": 22.59779930114746, "learning_rate": 7.84888888888889e-06, "loss": 1.0756, "step": 10743 }, { "epoch": 85.952, "grad_norm": 38.71036911010742, "learning_rate": 7.844444444444445e-06, "loss": 0.8506, "step": 10744 }, { "epoch": 85.96, "grad_norm": 24.451749801635742, "learning_rate": 7.84e-06, "loss": 0.857, "step": 10745 }, { "epoch": 85.968, "grad_norm": 40.05641174316406, "learning_rate": 7.835555555555556e-06, "loss": 0.7879, "step": 10746 }, { "epoch": 85.976, "grad_norm": 22.1761474609375, "learning_rate": 7.831111111111111e-06, "loss": 0.9381, "step": 10747 }, { "epoch": 85.984, "grad_norm": 23.330718994140625, "learning_rate": 7.826666666666667e-06, "loss": 0.6295, "step": 10748 }, { "epoch": 85.992, "grad_norm": 20.137462615966797, "learning_rate": 7.822222222222222e-06, "loss": 0.6994, "step": 10749 }, { "epoch": 86.0, "grad_norm": 18.3275146484375, "learning_rate": 7.817777777777779e-06, "loss": 0.5149, "step": 10750 }, { "epoch": 86.0, "eval_loss": 0.9406871795654297, "eval_map": 0.4583, "eval_map_50": 0.8075, "eval_map_75": 0.4705, "eval_map_Coverall": 0.6857, "eval_map_Face_Shield": 0.5638, "eval_map_Gloves": 0.3633, "eval_map_Goggles": 0.2639, "eval_map_Mask": 0.4147, "eval_map_large": 0.6814, "eval_map_medium": 0.3455, "eval_map_small": 0.3802, "eval_mar_1": 0.3523, "eval_mar_10": 0.5861, "eval_mar_100": 0.598, "eval_mar_100_Coverall": 0.7822, "eval_mar_100_Face_Shield": 0.6941, "eval_mar_100_Gloves": 0.4967, "eval_mar_100_Goggles": 0.5188, "eval_mar_100_Mask": 0.4981, "eval_mar_large": 0.795, "eval_mar_medium": 0.5335, "eval_mar_small": 0.4316, "eval_runtime": 0.9163, 
"eval_samples_per_second": 31.648, "eval_steps_per_second": 2.183, "step": 10750 }, { "epoch": 86.008, "grad_norm": 33.128231048583984, "learning_rate": 7.813333333333334e-06, "loss": 1.1803, "step": 10751 }, { "epoch": 86.016, "grad_norm": 15.584942817687988, "learning_rate": 7.808888888888888e-06, "loss": 1.5081, "step": 10752 }, { "epoch": 86.024, "grad_norm": 17.181459426879883, "learning_rate": 7.804444444444445e-06, "loss": 1.2394, "step": 10753 }, { "epoch": 86.032, "grad_norm": 29.626331329345703, "learning_rate": 7.8e-06, "loss": 0.9091, "step": 10754 }, { "epoch": 86.04, "grad_norm": 27.09749412536621, "learning_rate": 7.795555555555556e-06, "loss": 0.9177, "step": 10755 }, { "epoch": 86.048, "grad_norm": 22.5849552154541, "learning_rate": 7.791111111111111e-06, "loss": 0.9058, "step": 10756 }, { "epoch": 86.056, "grad_norm": 23.346282958984375, "learning_rate": 7.786666666666668e-06, "loss": 0.6287, "step": 10757 }, { "epoch": 86.064, "grad_norm": 39.34840393066406, "learning_rate": 7.782222222222223e-06, "loss": 0.6971, "step": 10758 }, { "epoch": 86.072, "grad_norm": 17.790546417236328, "learning_rate": 7.777777777777777e-06, "loss": 0.9485, "step": 10759 }, { "epoch": 86.08, "grad_norm": 52.226707458496094, "learning_rate": 7.773333333333334e-06, "loss": 0.9449, "step": 10760 }, { "epoch": 86.088, "grad_norm": 198.43276977539062, "learning_rate": 7.768888888888889e-06, "loss": 1.4187, "step": 10761 }, { "epoch": 86.096, "grad_norm": 21.702472686767578, "learning_rate": 7.764444444444445e-06, "loss": 0.9344, "step": 10762 }, { "epoch": 86.104, "grad_norm": 22.085376739501953, "learning_rate": 7.76e-06, "loss": 0.7456, "step": 10763 }, { "epoch": 86.112, "grad_norm": 16.439966201782227, "learning_rate": 7.755555555555557e-06, "loss": 0.7328, "step": 10764 }, { "epoch": 86.12, "grad_norm": 14.60367488861084, "learning_rate": 7.751111111111112e-06, "loss": 0.7668, "step": 10765 }, { "epoch": 86.128, "grad_norm": 24.662147521972656, "learning_rate": 
7.746666666666668e-06, "loss": 0.7605, "step": 10766 }, { "epoch": 86.136, "grad_norm": 23.659151077270508, "learning_rate": 7.742222222222223e-06, "loss": 0.5483, "step": 10767 }, { "epoch": 86.144, "grad_norm": 32.718116760253906, "learning_rate": 7.737777777777778e-06, "loss": 0.7496, "step": 10768 }, { "epoch": 86.152, "grad_norm": 30.738750457763672, "learning_rate": 7.733333333333334e-06, "loss": 0.8529, "step": 10769 }, { "epoch": 86.16, "grad_norm": 18.898252487182617, "learning_rate": 7.72888888888889e-06, "loss": 0.5516, "step": 10770 }, { "epoch": 86.168, "grad_norm": 45.56102752685547, "learning_rate": 7.724444444444446e-06, "loss": 1.132, "step": 10771 }, { "epoch": 86.176, "grad_norm": 21.01708984375, "learning_rate": 7.72e-06, "loss": 0.6061, "step": 10772 }, { "epoch": 86.184, "grad_norm": 21.130434036254883, "learning_rate": 7.715555555555557e-06, "loss": 0.6439, "step": 10773 }, { "epoch": 86.192, "grad_norm": 28.43838882446289, "learning_rate": 7.711111111111112e-06, "loss": 0.685, "step": 10774 }, { "epoch": 86.2, "grad_norm": 17.519227981567383, "learning_rate": 7.706666666666667e-06, "loss": 0.8512, "step": 10775 }, { "epoch": 86.208, "grad_norm": 15.5980224609375, "learning_rate": 7.702222222222223e-06, "loss": 0.7319, "step": 10776 }, { "epoch": 86.216, "grad_norm": 16.119508743286133, "learning_rate": 7.697777777777778e-06, "loss": 1.0673, "step": 10777 }, { "epoch": 86.224, "grad_norm": 21.567699432373047, "learning_rate": 7.693333333333335e-06, "loss": 0.8264, "step": 10778 }, { "epoch": 86.232, "grad_norm": 15.551682472229004, "learning_rate": 7.68888888888889e-06, "loss": 0.89, "step": 10779 }, { "epoch": 86.24, "grad_norm": 30.892004013061523, "learning_rate": 7.684444444444446e-06, "loss": 0.8156, "step": 10780 }, { "epoch": 86.248, "grad_norm": 57.62043380737305, "learning_rate": 7.68e-06, "loss": 0.6564, "step": 10781 }, { "epoch": 86.256, "grad_norm": 19.06979751586914, "learning_rate": 7.675555555555556e-06, "loss": 0.8614, 
"step": 10782 }, { "epoch": 86.264, "grad_norm": 14.282329559326172, "learning_rate": 7.67111111111111e-06, "loss": 0.7476, "step": 10783 }, { "epoch": 86.272, "grad_norm": 33.19206619262695, "learning_rate": 7.666666666666667e-06, "loss": 0.8996, "step": 10784 }, { "epoch": 86.28, "grad_norm": 28.921085357666016, "learning_rate": 7.662222222222222e-06, "loss": 0.6423, "step": 10785 }, { "epoch": 86.288, "grad_norm": 27.63017463684082, "learning_rate": 7.657777777777777e-06, "loss": 0.7178, "step": 10786 }, { "epoch": 86.296, "grad_norm": 21.112995147705078, "learning_rate": 7.653333333333333e-06, "loss": 0.7133, "step": 10787 }, { "epoch": 86.304, "grad_norm": 39.13127136230469, "learning_rate": 7.648888888888888e-06, "loss": 0.7718, "step": 10788 }, { "epoch": 86.312, "grad_norm": 26.341421127319336, "learning_rate": 7.644444444444445e-06, "loss": 0.787, "step": 10789 }, { "epoch": 86.32, "grad_norm": 63.9154052734375, "learning_rate": 7.64e-06, "loss": 1.0185, "step": 10790 }, { "epoch": 86.328, "grad_norm": 35.95541000366211, "learning_rate": 7.635555555555556e-06, "loss": 0.9771, "step": 10791 }, { "epoch": 86.336, "grad_norm": 41.0498046875, "learning_rate": 7.631111111111111e-06, "loss": 0.5939, "step": 10792 }, { "epoch": 86.344, "grad_norm": 19.99672508239746, "learning_rate": 7.626666666666667e-06, "loss": 0.721, "step": 10793 }, { "epoch": 86.352, "grad_norm": 21.91262435913086, "learning_rate": 7.6222222222222225e-06, "loss": 0.832, "step": 10794 }, { "epoch": 86.36, "grad_norm": 18.17821502685547, "learning_rate": 7.617777777777778e-06, "loss": 0.6672, "step": 10795 }, { "epoch": 86.368, "grad_norm": 23.589282989501953, "learning_rate": 7.613333333333334e-06, "loss": 0.7422, "step": 10796 }, { "epoch": 86.376, "grad_norm": 18.184036254882812, "learning_rate": 7.608888888888889e-06, "loss": 0.6656, "step": 10797 }, { "epoch": 86.384, "grad_norm": 24.046424865722656, "learning_rate": 7.604444444444444e-06, "loss": 0.9824, "step": 10798 }, { "epoch": 
86.392, "grad_norm": 21.951351165771484, "learning_rate": 7.6e-06, "loss": 0.6938, "step": 10799 }, { "epoch": 86.4, "grad_norm": 19.0213680267334, "learning_rate": 7.595555555555556e-06, "loss": 0.7991, "step": 10800 }, { "epoch": 86.408, "grad_norm": 223.3821563720703, "learning_rate": 7.5911111111111115e-06, "loss": 0.7607, "step": 10801 }, { "epoch": 86.416, "grad_norm": 38.54779815673828, "learning_rate": 7.586666666666667e-06, "loss": 1.958, "step": 10802 }, { "epoch": 86.424, "grad_norm": 18.702421188354492, "learning_rate": 7.582222222222223e-06, "loss": 0.7626, "step": 10803 }, { "epoch": 86.432, "grad_norm": 22.977251052856445, "learning_rate": 7.577777777777778e-06, "loss": 1.1783, "step": 10804 }, { "epoch": 86.44, "grad_norm": 13.956130027770996, "learning_rate": 7.573333333333333e-06, "loss": 0.6295, "step": 10805 }, { "epoch": 86.448, "grad_norm": 34.5149040222168, "learning_rate": 7.568888888888889e-06, "loss": 0.6366, "step": 10806 }, { "epoch": 86.456, "grad_norm": 21.448705673217773, "learning_rate": 7.564444444444445e-06, "loss": 1.3599, "step": 10807 }, { "epoch": 86.464, "grad_norm": 22.066165924072266, "learning_rate": 7.5600000000000005e-06, "loss": 0.8836, "step": 10808 }, { "epoch": 86.472, "grad_norm": 18.479705810546875, "learning_rate": 7.555555555555556e-06, "loss": 0.8022, "step": 10809 }, { "epoch": 86.48, "grad_norm": 28.701507568359375, "learning_rate": 7.551111111111112e-06, "loss": 0.7783, "step": 10810 }, { "epoch": 86.488, "grad_norm": 26.44816017150879, "learning_rate": 7.5466666666666675e-06, "loss": 0.583, "step": 10811 }, { "epoch": 86.496, "grad_norm": 71.67562866210938, "learning_rate": 7.542222222222222e-06, "loss": 2.5633, "step": 10812 }, { "epoch": 86.504, "grad_norm": 78.96871948242188, "learning_rate": 7.537777777777778e-06, "loss": 0.7962, "step": 10813 }, { "epoch": 86.512, "grad_norm": 38.61671829223633, "learning_rate": 7.533333333333334e-06, "loss": 0.7705, "step": 10814 }, { "epoch": 86.52, "grad_norm": 
20.575857162475586, "learning_rate": 7.5288888888888895e-06, "loss": 0.5831, "step": 10815 }, { "epoch": 86.528, "grad_norm": 22.474332809448242, "learning_rate": 7.524444444444445e-06, "loss": 0.8458, "step": 10816 }, { "epoch": 86.536, "grad_norm": 30.68370819091797, "learning_rate": 7.520000000000001e-06, "loss": 0.702, "step": 10817 }, { "epoch": 86.544, "grad_norm": 26.057680130004883, "learning_rate": 7.5155555555555565e-06, "loss": 0.462, "step": 10818 }, { "epoch": 86.552, "grad_norm": 46.327415466308594, "learning_rate": 7.511111111111112e-06, "loss": 0.8594, "step": 10819 }, { "epoch": 86.56, "grad_norm": 32.1354866027832, "learning_rate": 7.506666666666667e-06, "loss": 1.0025, "step": 10820 }, { "epoch": 86.568, "grad_norm": 379.061767578125, "learning_rate": 7.502222222222223e-06, "loss": 0.7835, "step": 10821 }, { "epoch": 86.576, "grad_norm": 26.957218170166016, "learning_rate": 7.4977777777777785e-06, "loss": 1.3827, "step": 10822 }, { "epoch": 86.584, "grad_norm": 26.125835418701172, "learning_rate": 7.493333333333334e-06, "loss": 0.6038, "step": 10823 }, { "epoch": 86.592, "grad_norm": 22.857454299926758, "learning_rate": 7.48888888888889e-06, "loss": 0.8549, "step": 10824 }, { "epoch": 86.6, "grad_norm": 15.618836402893066, "learning_rate": 7.4844444444444455e-06, "loss": 0.8563, "step": 10825 }, { "epoch": 86.608, "grad_norm": 31.25887107849121, "learning_rate": 7.480000000000001e-06, "loss": 0.8793, "step": 10826 }, { "epoch": 86.616, "grad_norm": 36.51592254638672, "learning_rate": 7.475555555555556e-06, "loss": 1.1285, "step": 10827 }, { "epoch": 86.624, "grad_norm": 44.19888687133789, "learning_rate": 7.471111111111112e-06, "loss": 0.593, "step": 10828 }, { "epoch": 86.632, "grad_norm": 23.053335189819336, "learning_rate": 7.4666666666666675e-06, "loss": 0.8881, "step": 10829 }, { "epoch": 86.64, "grad_norm": 16.169017791748047, "learning_rate": 7.462222222222223e-06, "loss": 0.9379, "step": 10830 }, { "epoch": 86.648, "grad_norm": 
14.153270721435547, "learning_rate": 7.457777777777779e-06, "loss": 0.9746, "step": 10831 }, { "epoch": 86.656, "grad_norm": 42.68016052246094, "learning_rate": 7.453333333333333e-06, "loss": 0.7978, "step": 10832 }, { "epoch": 86.664, "grad_norm": 27.129350662231445, "learning_rate": 7.4488888888888885e-06, "loss": 0.7292, "step": 10833 }, { "epoch": 86.672, "grad_norm": 24.546091079711914, "learning_rate": 7.444444444444444e-06, "loss": 0.8136, "step": 10834 }, { "epoch": 86.68, "grad_norm": 25.369138717651367, "learning_rate": 7.44e-06, "loss": 0.7811, "step": 10835 }, { "epoch": 86.688, "grad_norm": 48.467315673828125, "learning_rate": 7.435555555555556e-06, "loss": 0.9022, "step": 10836 }, { "epoch": 86.696, "grad_norm": 28.498050689697266, "learning_rate": 7.431111111111111e-06, "loss": 1.017, "step": 10837 }, { "epoch": 86.704, "grad_norm": 32.459014892578125, "learning_rate": 7.426666666666666e-06, "loss": 1.6228, "step": 10838 }, { "epoch": 86.712, "grad_norm": 19.506181716918945, "learning_rate": 7.422222222222222e-06, "loss": 0.891, "step": 10839 }, { "epoch": 86.72, "grad_norm": 17.631366729736328, "learning_rate": 7.4177777777777775e-06, "loss": 0.6554, "step": 10840 }, { "epoch": 86.728, "grad_norm": 21.60854148864746, "learning_rate": 7.413333333333333e-06, "loss": 0.8238, "step": 10841 }, { "epoch": 86.736, "grad_norm": 17.84695816040039, "learning_rate": 7.408888888888889e-06, "loss": 1.0246, "step": 10842 }, { "epoch": 86.744, "grad_norm": 21.433582305908203, "learning_rate": 7.404444444444445e-06, "loss": 0.9098, "step": 10843 }, { "epoch": 86.752, "grad_norm": 31.56831932067871, "learning_rate": 7.4e-06, "loss": 1.1511, "step": 10844 }, { "epoch": 86.76, "grad_norm": 25.306594848632812, "learning_rate": 7.395555555555556e-06, "loss": 0.7378, "step": 10845 }, { "epoch": 86.768, "grad_norm": 16.920324325561523, "learning_rate": 7.391111111111111e-06, "loss": 0.8392, "step": 10846 }, { "epoch": 86.776, "grad_norm": 32.59617614746094, 
"learning_rate": 7.3866666666666665e-06, "loss": 0.8957, "step": 10847 }, { "epoch": 86.784, "grad_norm": 48.476505279541016, "learning_rate": 7.382222222222222e-06, "loss": 0.6798, "step": 10848 }, { "epoch": 86.792, "grad_norm": 29.836894989013672, "learning_rate": 7.377777777777778e-06, "loss": 1.1016, "step": 10849 }, { "epoch": 86.8, "grad_norm": 27.205522537231445, "learning_rate": 7.373333333333334e-06, "loss": 1.203, "step": 10850 }, { "epoch": 86.808, "grad_norm": 14.833002090454102, "learning_rate": 7.368888888888889e-06, "loss": 0.8645, "step": 10851 }, { "epoch": 86.816, "grad_norm": 32.322967529296875, "learning_rate": 7.364444444444445e-06, "loss": 0.8472, "step": 10852 }, { "epoch": 86.824, "grad_norm": 31.46375274658203, "learning_rate": 7.36e-06, "loss": 1.7384, "step": 10853 }, { "epoch": 86.832, "grad_norm": 25.1922550201416, "learning_rate": 7.3555555555555555e-06, "loss": 0.9166, "step": 10854 }, { "epoch": 86.84, "grad_norm": 21.175512313842773, "learning_rate": 7.351111111111111e-06, "loss": 0.6326, "step": 10855 }, { "epoch": 86.848, "grad_norm": 20.88690757751465, "learning_rate": 7.346666666666667e-06, "loss": 1.1316, "step": 10856 }, { "epoch": 86.856, "grad_norm": 54.46714401245117, "learning_rate": 7.342222222222223e-06, "loss": 0.8641, "step": 10857 }, { "epoch": 86.864, "grad_norm": 20.780452728271484, "learning_rate": 7.337777777777778e-06, "loss": 1.4386, "step": 10858 }, { "epoch": 86.872, "grad_norm": 25.90152359008789, "learning_rate": 7.333333333333334e-06, "loss": 0.6223, "step": 10859 }, { "epoch": 86.88, "grad_norm": 23.594064712524414, "learning_rate": 7.32888888888889e-06, "loss": 1.1414, "step": 10860 }, { "epoch": 86.888, "grad_norm": 22.81958770751953, "learning_rate": 7.3244444444444445e-06, "loss": 0.6436, "step": 10861 }, { "epoch": 86.896, "grad_norm": 29.382844924926758, "learning_rate": 7.32e-06, "loss": 0.8223, "step": 10862 }, { "epoch": 86.904, "grad_norm": 15.834494590759277, "learning_rate": 
7.315555555555556e-06, "loss": 0.8636, "step": 10863 }, { "epoch": 86.912, "grad_norm": 16.162750244140625, "learning_rate": 7.311111111111112e-06, "loss": 0.8828, "step": 10864 }, { "epoch": 86.92, "grad_norm": 16.046924591064453, "learning_rate": 7.306666666666667e-06, "loss": 0.6312, "step": 10865 }, { "epoch": 86.928, "grad_norm": 22.01559066772461, "learning_rate": 7.302222222222223e-06, "loss": 0.8413, "step": 10866 }, { "epoch": 86.936, "grad_norm": 32.304996490478516, "learning_rate": 7.297777777777779e-06, "loss": 0.7984, "step": 10867 }, { "epoch": 86.944, "grad_norm": 15.95419692993164, "learning_rate": 7.293333333333334e-06, "loss": 0.9372, "step": 10868 }, { "epoch": 86.952, "grad_norm": 22.855819702148438, "learning_rate": 7.288888888888889e-06, "loss": 0.7797, "step": 10869 }, { "epoch": 86.96, "grad_norm": 35.53315353393555, "learning_rate": 7.284444444444445e-06, "loss": 0.5279, "step": 10870 }, { "epoch": 86.968, "grad_norm": 17.161983489990234, "learning_rate": 7.280000000000001e-06, "loss": 0.5501, "step": 10871 }, { "epoch": 86.976, "grad_norm": 34.20730209350586, "learning_rate": 7.275555555555556e-06, "loss": 0.534, "step": 10872 }, { "epoch": 86.984, "grad_norm": 28.581445693969727, "learning_rate": 7.271111111111112e-06, "loss": 0.6112, "step": 10873 }, { "epoch": 86.992, "grad_norm": 23.85624122619629, "learning_rate": 7.266666666666668e-06, "loss": 0.7937, "step": 10874 }, { "epoch": 87.0, "grad_norm": 27.19939613342285, "learning_rate": 7.262222222222223e-06, "loss": 0.6413, "step": 10875 }, { "epoch": 87.0, "eval_loss": 0.9445920586585999, "eval_map": 0.46, "eval_map_50": 0.7976, "eval_map_75": 0.4392, "eval_map_Coverall": 0.6759, "eval_map_Face_Shield": 0.5781, "eval_map_Gloves": 0.3593, "eval_map_Goggles": 0.2789, "eval_map_Mask": 0.4078, "eval_map_large": 0.6912, "eval_map_medium": 0.3094, "eval_map_small": 0.4057, "eval_mar_1": 0.3553, "eval_mar_10": 0.5849, "eval_mar_100": 0.5991, "eval_mar_100_Coverall": 0.7733, 
"eval_mar_100_Face_Shield": 0.7059, "eval_mar_100_Gloves": 0.4885, "eval_mar_100_Goggles": 0.5219, "eval_mar_100_Mask": 0.5058, "eval_mar_large": 0.8059, "eval_mar_medium": 0.4952, "eval_mar_small": 0.4647, "eval_runtime": 0.9184, "eval_samples_per_second": 31.577, "eval_steps_per_second": 2.178, "step": 10875 }, { "epoch": 87.008, "grad_norm": 16.97153663635254, "learning_rate": 7.257777777777778e-06, "loss": 1.0881, "step": 10876 }, { "epoch": 87.016, "grad_norm": 15.656050682067871, "learning_rate": 7.253333333333334e-06, "loss": 0.6519, "step": 10877 }, { "epoch": 87.024, "grad_norm": 19.414392471313477, "learning_rate": 7.24888888888889e-06, "loss": 0.6599, "step": 10878 }, { "epoch": 87.032, "grad_norm": 17.87226676940918, "learning_rate": 7.244444444444445e-06, "loss": 0.5205, "step": 10879 }, { "epoch": 87.04, "grad_norm": 19.781633377075195, "learning_rate": 7.240000000000001e-06, "loss": 1.0409, "step": 10880 }, { "epoch": 87.048, "grad_norm": 15.976301193237305, "learning_rate": 7.235555555555557e-06, "loss": 0.9548, "step": 10881 }, { "epoch": 87.056, "grad_norm": 146.09217834472656, "learning_rate": 7.231111111111112e-06, "loss": 0.9595, "step": 10882 }, { "epoch": 87.064, "grad_norm": 18.439895629882812, "learning_rate": 7.226666666666668e-06, "loss": 1.0507, "step": 10883 }, { "epoch": 87.072, "grad_norm": 39.04924392700195, "learning_rate": 7.222222222222222e-06, "loss": 0.8575, "step": 10884 }, { "epoch": 87.08, "grad_norm": 27.440574645996094, "learning_rate": 7.217777777777778e-06, "loss": 0.7257, "step": 10885 }, { "epoch": 87.088, "grad_norm": 26.36521339416504, "learning_rate": 7.2133333333333334e-06, "loss": 0.8714, "step": 10886 }, { "epoch": 87.096, "grad_norm": 27.09841537475586, "learning_rate": 7.208888888888888e-06, "loss": 0.662, "step": 10887 }, { "epoch": 87.104, "grad_norm": 118.28044891357422, "learning_rate": 7.204444444444444e-06, "loss": 0.8214, "step": 10888 }, { "epoch": 87.112, "grad_norm": 21.118675231933594, 
"learning_rate": 7.2e-06, "loss": 0.6446, "step": 10889 }, { "epoch": 87.12, "grad_norm": 51.181827545166016, "learning_rate": 7.195555555555555e-06, "loss": 0.5943, "step": 10890 }, { "epoch": 87.128, "grad_norm": 29.903095245361328, "learning_rate": 7.191111111111111e-06, "loss": 0.6547, "step": 10891 }, { "epoch": 87.136, "grad_norm": 20.077585220336914, "learning_rate": 7.186666666666667e-06, "loss": 1.767, "step": 10892 }, { "epoch": 87.144, "grad_norm": 26.422565460205078, "learning_rate": 7.1822222222222224e-06, "loss": 0.5833, "step": 10893 }, { "epoch": 87.152, "grad_norm": null, "learning_rate": 7.1822222222222224e-06, "loss": 1.7, "step": 10894 }, { "epoch": 87.16, "grad_norm": 22.66292953491211, "learning_rate": 7.177777777777778e-06, "loss": 1.8288, "step": 10895 }, { "epoch": 87.168, "grad_norm": 55.1499137878418, "learning_rate": 7.173333333333333e-06, "loss": 0.67, "step": 10896 }, { "epoch": 87.176, "grad_norm": 46.55286407470703, "learning_rate": 7.168888888888889e-06, "loss": 0.6877, "step": 10897 }, { "epoch": 87.184, "grad_norm": 53.323463439941406, "learning_rate": 7.164444444444444e-06, "loss": 0.5693, "step": 10898 }, { "epoch": 87.192, "grad_norm": 21.853940963745117, "learning_rate": 7.16e-06, "loss": 0.7262, "step": 10899 }, { "epoch": 87.2, "grad_norm": 16.513038635253906, "learning_rate": 7.155555555555556e-06, "loss": 0.8681, "step": 10900 }, { "epoch": 87.208, "grad_norm": 11.653072357177734, "learning_rate": 7.1511111111111114e-06, "loss": 0.7354, "step": 10901 }, { "epoch": 87.216, "grad_norm": 21.159669876098633, "learning_rate": 7.146666666666667e-06, "loss": 0.7333, "step": 10902 }, { "epoch": 87.224, "grad_norm": null, "learning_rate": 7.146666666666667e-06, "loss": 0.6811, "step": 10903 }, { "epoch": 87.232, "grad_norm": 21.68131446838379, "learning_rate": 7.142222222222222e-06, "loss": 0.9465, "step": 10904 }, { "epoch": 87.24, "grad_norm": 14.395179748535156, "learning_rate": 7.137777777777778e-06, "loss": 0.6375,
"step": 10905 }, { "epoch": 87.248, "grad_norm": 21.828828811645508, "learning_rate": 7.133333333333333e-06, "loss": 0.7472, "step": 10906 }, { "epoch": 87.256, "grad_norm": 23.052194595336914, "learning_rate": 7.128888888888889e-06, "loss": 0.8651, "step": 10907 }, { "epoch": 87.264, "grad_norm": 32.82661437988281, "learning_rate": 7.124444444444445e-06, "loss": 0.6174, "step": 10908 }, { "epoch": 87.272, "grad_norm": 14.670883178710938, "learning_rate": 7.1200000000000004e-06, "loss": 0.7744, "step": 10909 }, { "epoch": 87.28, "grad_norm": 82.37703704833984, "learning_rate": 7.115555555555556e-06, "loss": 0.894, "step": 10910 }, { "epoch": 87.288, "grad_norm": 15.726552963256836, "learning_rate": 7.111111111111112e-06, "loss": 1.1701, "step": 10911 }, { "epoch": 87.296, "grad_norm": 23.104494094848633, "learning_rate": 7.106666666666667e-06, "loss": 0.872, "step": 10912 }, { "epoch": 87.304, "grad_norm": 16.85047149658203, "learning_rate": 7.102222222222222e-06, "loss": 0.9154, "step": 10913 }, { "epoch": 87.312, "grad_norm": 49.828224182128906, "learning_rate": 7.097777777777778e-06, "loss": 0.6673, "step": 10914 }, { "epoch": 87.32, "grad_norm": 23.57131576538086, "learning_rate": 7.093333333333334e-06, "loss": 0.7167, "step": 10915 }, { "epoch": 87.328, "grad_norm": 27.63296127319336, "learning_rate": 7.0888888888888894e-06, "loss": 1.0222, "step": 10916 }, { "epoch": 87.336, "grad_norm": 30.775541305541992, "learning_rate": 7.084444444444445e-06, "loss": 0.6796, "step": 10917 }, { "epoch": 87.344, "grad_norm": 27.72917938232422, "learning_rate": 7.080000000000001e-06, "loss": 0.8341, "step": 10918 }, { "epoch": 87.352, "grad_norm": 16.3826904296875, "learning_rate": 7.0755555555555565e-06, "loss": 0.9287, "step": 10919 }, { "epoch": 87.36, "grad_norm": 17.5123291015625, "learning_rate": 7.071111111111111e-06, "loss": 0.5889, "step": 10920 }, { "epoch": 87.368, "grad_norm": 13.164152145385742, "learning_rate": 7.066666666666667e-06, "loss": 0.7125, "step": 
10921 }, { "epoch": 87.376, "grad_norm": 36.025299072265625, "learning_rate": 7.062222222222223e-06, "loss": 0.7654, "step": 10922 }, { "epoch": 87.384, "grad_norm": 16.67941665649414, "learning_rate": 7.0577777777777784e-06, "loss": 0.5678, "step": 10923 }, { "epoch": 87.392, "grad_norm": 11.875500679016113, "learning_rate": 7.053333333333334e-06, "loss": 0.9106, "step": 10924 }, { "epoch": 87.4, "grad_norm": 42.338706970214844, "learning_rate": 7.04888888888889e-06, "loss": 1.3807, "step": 10925 }, { "epoch": 87.408, "grad_norm": 28.01873207092285, "learning_rate": 7.0444444444444455e-06, "loss": 1.1533, "step": 10926 }, { "epoch": 87.416, "grad_norm": 17.967742919921875, "learning_rate": 7.04e-06, "loss": 0.5853, "step": 10927 }, { "epoch": 87.424, "grad_norm": 18.824993133544922, "learning_rate": 7.035555555555556e-06, "loss": 0.7506, "step": 10928 }, { "epoch": 87.432, "grad_norm": 25.765398025512695, "learning_rate": 7.031111111111112e-06, "loss": 0.8155, "step": 10929 }, { "epoch": 87.44, "grad_norm": 39.95828628540039, "learning_rate": 7.0266666666666674e-06, "loss": 0.7873, "step": 10930 }, { "epoch": 87.448, "grad_norm": 21.265113830566406, "learning_rate": 7.022222222222223e-06, "loss": 1.2047, "step": 10931 }, { "epoch": 87.456, "grad_norm": 116.77570343017578, "learning_rate": 7.017777777777779e-06, "loss": 0.916, "step": 10932 }, { "epoch": 87.464, "grad_norm": 45.26508712768555, "learning_rate": 7.0133333333333345e-06, "loss": 0.7328, "step": 10933 }, { "epoch": 87.472, "grad_norm": 148.9374237060547, "learning_rate": 7.00888888888889e-06, "loss": 1.462, "step": 10934 }, { "epoch": 87.48, "grad_norm": 24.99488639831543, "learning_rate": 7.004444444444445e-06, "loss": 0.5653, "step": 10935 }, { "epoch": 87.488, "grad_norm": 25.416303634643555, "learning_rate": 7.000000000000001e-06, "loss": 0.5984, "step": 10936 }, { "epoch": 87.496, "grad_norm": 25.436113357543945, "learning_rate": 6.9955555555555564e-06, "loss": 0.8858, "step": 10937 }, { "epoch": 
87.504, "grad_norm": 23.600582122802734, "learning_rate": 6.9911111111111104e-06, "loss": 1.0152, "step": 10938 }, { "epoch": 87.512, "grad_norm": 17.106733322143555, "learning_rate": 6.986666666666666e-06, "loss": 0.8819, "step": 10939 }, { "epoch": 87.52, "grad_norm": 110.9582748413086, "learning_rate": 6.982222222222222e-06, "loss": 0.9084, "step": 10940 }, { "epoch": 87.528, "grad_norm": 23.094396591186523, "learning_rate": 6.9777777777777775e-06, "loss": 0.9635, "step": 10941 }, { "epoch": 87.536, "grad_norm": 35.172664642333984, "learning_rate": 6.973333333333333e-06, "loss": 0.8397, "step": 10942 }, { "epoch": 87.544, "grad_norm": 18.225627899169922, "learning_rate": 6.968888888888889e-06, "loss": 0.6911, "step": 10943 }, { "epoch": 87.552, "grad_norm": 89.6058349609375, "learning_rate": 6.964444444444445e-06, "loss": 0.8266, "step": 10944 }, { "epoch": 87.56, "grad_norm": 25.261470794677734, "learning_rate": 6.9599999999999994e-06, "loss": 0.7112, "step": 10945 }, { "epoch": 87.568, "grad_norm": 17.450496673583984, "learning_rate": 6.955555555555555e-06, "loss": 0.6623, "step": 10946 }, { "epoch": 87.576, "grad_norm": 28.18628692626953, "learning_rate": 6.951111111111111e-06, "loss": 0.9103, "step": 10947 }, { "epoch": 87.584, "grad_norm": 34.116519927978516, "learning_rate": 6.9466666666666665e-06, "loss": 0.9731, "step": 10948 }, { "epoch": 87.592, "grad_norm": 28.94240951538086, "learning_rate": 6.942222222222222e-06, "loss": 1.6543, "step": 10949 }, { "epoch": 87.6, "grad_norm": 122.50019836425781, "learning_rate": 6.937777777777778e-06, "loss": 1.2094, "step": 10950 }, { "epoch": 87.608, "grad_norm": 23.857053756713867, "learning_rate": 6.933333333333334e-06, "loss": 0.7839, "step": 10951 }, { "epoch": 87.616, "grad_norm": 64.4105224609375, "learning_rate": 6.928888888888889e-06, "loss": 0.7735, "step": 10952 }, { "epoch": 87.624, "grad_norm": 40.34792709350586, "learning_rate": 6.924444444444444e-06, "loss": 0.5867, "step": 10953 }, { "epoch": 87.632, 
"grad_norm": 14.166504859924316, "learning_rate": 6.92e-06, "loss": 0.9591, "step": 10954 }, { "epoch": 87.64, "grad_norm": 18.418468475341797, "learning_rate": 6.9155555555555555e-06, "loss": 0.8666, "step": 10955 }, { "epoch": 87.648, "grad_norm": 46.774967193603516, "learning_rate": 6.911111111111111e-06, "loss": 2.2684, "step": 10956 }, { "epoch": 87.656, "grad_norm": 47.6944580078125, "learning_rate": 6.906666666666667e-06, "loss": 0.8228, "step": 10957 }, { "epoch": 87.664, "grad_norm": 23.853370666503906, "learning_rate": 6.902222222222223e-06, "loss": 0.9288, "step": 10958 }, { "epoch": 87.672, "grad_norm": 23.7213077545166, "learning_rate": 6.897777777777778e-06, "loss": 0.6768, "step": 10959 }, { "epoch": 87.68, "grad_norm": 17.193063735961914, "learning_rate": 6.893333333333334e-06, "loss": 0.8854, "step": 10960 }, { "epoch": 87.688, "grad_norm": 61.56022644042969, "learning_rate": 6.888888888888889e-06, "loss": 0.8717, "step": 10961 }, { "epoch": 87.696, "grad_norm": 18.222633361816406, "learning_rate": 6.8844444444444445e-06, "loss": 0.7083, "step": 10962 }, { "epoch": 87.704, "grad_norm": 42.71918487548828, "learning_rate": 6.88e-06, "loss": 0.9625, "step": 10963 }, { "epoch": 87.712, "grad_norm": 20.64776039123535, "learning_rate": 6.875555555555556e-06, "loss": 0.8375, "step": 10964 }, { "epoch": 87.72, "grad_norm": 24.70602798461914, "learning_rate": 6.871111111111112e-06, "loss": 0.961, "step": 10965 }, { "epoch": 87.728, "grad_norm": 22.850627899169922, "learning_rate": 6.866666666666667e-06, "loss": 0.9535, "step": 10966 }, { "epoch": 87.736, "grad_norm": 25.767065048217773, "learning_rate": 6.862222222222223e-06, "loss": 0.6159, "step": 10967 }, { "epoch": 87.744, "grad_norm": 18.723407745361328, "learning_rate": 6.857777777777779e-06, "loss": 0.8884, "step": 10968 }, { "epoch": 87.752, "grad_norm": 64.5700454711914, "learning_rate": 6.8533333333333335e-06, "loss": 0.5966, "step": 10969 }, { "epoch": 87.76, "grad_norm": 17.45231819152832, 
"learning_rate": 6.848888888888889e-06, "loss": 1.1585, "step": 10970 }, { "epoch": 87.768, "grad_norm": 20.668054580688477, "learning_rate": 6.844444444444445e-06, "loss": 0.6547, "step": 10971 }, { "epoch": 87.776, "grad_norm": 30.333141326904297, "learning_rate": 6.840000000000001e-06, "loss": 1.1003, "step": 10972 }, { "epoch": 87.784, "grad_norm": 57.72286605834961, "learning_rate": 6.835555555555556e-06, "loss": 0.8204, "step": 10973 }, { "epoch": 87.792, "grad_norm": 17.069278717041016, "learning_rate": 6.831111111111112e-06, "loss": 0.7563, "step": 10974 }, { "epoch": 87.8, "grad_norm": 27.203147888183594, "learning_rate": 6.826666666666668e-06, "loss": 0.7515, "step": 10975 }, { "epoch": 87.808, "grad_norm": 24.78919219970703, "learning_rate": 6.8222222222222225e-06, "loss": 0.8872, "step": 10976 }, { "epoch": 87.816, "grad_norm": 25.465023040771484, "learning_rate": 6.817777777777778e-06, "loss": 0.7653, "step": 10977 }, { "epoch": 87.824, "grad_norm": 21.88853645324707, "learning_rate": 6.813333333333334e-06, "loss": 0.581, "step": 10978 }, { "epoch": 87.832, "grad_norm": 22.30812644958496, "learning_rate": 6.80888888888889e-06, "loss": 0.747, "step": 10979 }, { "epoch": 87.84, "grad_norm": 24.862272262573242, "learning_rate": 6.804444444444445e-06, "loss": 0.5519, "step": 10980 }, { "epoch": 87.848, "grad_norm": 15.900065422058105, "learning_rate": 6.800000000000001e-06, "loss": 0.8358, "step": 10981 }, { "epoch": 87.856, "grad_norm": 22.596996307373047, "learning_rate": 6.795555555555557e-06, "loss": 0.7614, "step": 10982 }, { "epoch": 87.864, "grad_norm": 23.293041229248047, "learning_rate": 6.791111111111112e-06, "loss": 0.8285, "step": 10983 }, { "epoch": 87.872, "grad_norm": 13.218465805053711, "learning_rate": 6.786666666666667e-06, "loss": 0.7814, "step": 10984 }, { "epoch": 87.88, "grad_norm": 35.18043899536133, "learning_rate": 6.782222222222223e-06, "loss": 0.6699, "step": 10985 }, { "epoch": 87.888, "grad_norm": 23.49252700805664, 
"learning_rate": 6.777777777777779e-06, "loss": 0.5592, "step": 10986 }, { "epoch": 87.896, "grad_norm": 14.521470069885254, "learning_rate": 6.773333333333334e-06, "loss": 0.7713, "step": 10987 }, { "epoch": 87.904, "grad_norm": 20.68399429321289, "learning_rate": 6.76888888888889e-06, "loss": 0.7449, "step": 10988 }, { "epoch": 87.912, "grad_norm": 17.304431915283203, "learning_rate": 6.764444444444444e-06, "loss": 1.0397, "step": 10989 }, { "epoch": 87.92, "grad_norm": 25.31048583984375, "learning_rate": 6.76e-06, "loss": 0.8473, "step": 10990 }, { "epoch": 87.928, "grad_norm": 31.352191925048828, "learning_rate": 6.755555555555555e-06, "loss": 0.7642, "step": 10991 }, { "epoch": 87.936, "grad_norm": 14.804669380187988, "learning_rate": 6.751111111111111e-06, "loss": 0.7789, "step": 10992 }, { "epoch": 87.944, "grad_norm": 23.2509822845459, "learning_rate": 6.746666666666667e-06, "loss": 0.665, "step": 10993 }, { "epoch": 87.952, "grad_norm": 36.27638626098633, "learning_rate": 6.7422222222222216e-06, "loss": 1.5492, "step": 10994 }, { "epoch": 87.96, "grad_norm": 29.829700469970703, "learning_rate": 6.737777777777777e-06, "loss": 1.0882, "step": 10995 }, { "epoch": 87.968, "grad_norm": 35.04743576049805, "learning_rate": 6.733333333333333e-06, "loss": 0.8519, "step": 10996 }, { "epoch": 87.976, "grad_norm": 47.84653091430664, "learning_rate": 6.728888888888889e-06, "loss": 0.6436, "step": 10997 }, { "epoch": 87.984, "grad_norm": 49.45502471923828, "learning_rate": 6.724444444444444e-06, "loss": 0.7386, "step": 10998 }, { "epoch": 87.992, "grad_norm": 47.08482360839844, "learning_rate": 6.72e-06, "loss": 0.7302, "step": 10999 }, { "epoch": 88.0, "grad_norm": 14.903352737426758, "learning_rate": 6.715555555555556e-06, "loss": 0.9067, "step": 11000 }, { "epoch": 88.0, "eval_loss": 0.9204633235931396, "eval_map": 0.4836, "eval_map_50": 0.822, "eval_map_75": 0.5166, "eval_map_Coverall": 0.6889, "eval_map_Face_Shield": 0.5809, "eval_map_Gloves": 0.4008, 
"eval_map_Goggles": 0.2951, "eval_map_Mask": 0.4523, "eval_map_large": 0.672, "eval_map_medium": 0.3553, "eval_map_small": 0.3995, "eval_mar_1": 0.3596, "eval_mar_10": 0.6032, "eval_mar_100": 0.616, "eval_mar_100_Coverall": 0.7711, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.5098, "eval_mar_100_Goggles": 0.5469, "eval_mar_100_Mask": 0.5346, "eval_mar_large": 0.7861, "eval_mar_medium": 0.5323, "eval_mar_small": 0.4632, "eval_runtime": 0.9126, "eval_samples_per_second": 31.778, "eval_steps_per_second": 2.192, "step": 11000 }, { "epoch": 88.008, "grad_norm": 25.556493759155273, "learning_rate": 6.711111111111111e-06, "loss": 0.6052, "step": 11001 }, { "epoch": 88.016, "grad_norm": 71.71271514892578, "learning_rate": 6.706666666666666e-06, "loss": 0.9653, "step": 11002 }, { "epoch": 88.024, "grad_norm": 14.041586875915527, "learning_rate": 6.702222222222222e-06, "loss": 0.7282, "step": 11003 }, { "epoch": 88.032, "grad_norm": 20.61297607421875, "learning_rate": 6.697777777777778e-06, "loss": 0.8428, "step": 11004 }, { "epoch": 88.04, "grad_norm": 23.058103561401367, "learning_rate": 6.693333333333333e-06, "loss": 1.0256, "step": 11005 }, { "epoch": 88.048, "grad_norm": 28.85516929626465, "learning_rate": 6.688888888888889e-06, "loss": 0.9712, "step": 11006 }, { "epoch": 88.056, "grad_norm": 17.921775817871094, "learning_rate": 6.684444444444445e-06, "loss": 0.6102, "step": 11007 }, { "epoch": 88.064, "grad_norm": 41.568885803222656, "learning_rate": 6.68e-06, "loss": 0.8178, "step": 11008 }, { "epoch": 88.072, "grad_norm": 18.304304122924805, "learning_rate": 6.675555555555556e-06, "loss": 0.7063, "step": 11009 }, { "epoch": 88.08, "grad_norm": 32.756629943847656, "learning_rate": 6.671111111111111e-06, "loss": 0.8008, "step": 11010 }, { "epoch": 88.088, "grad_norm": 21.582122802734375, "learning_rate": 6.666666666666667e-06, "loss": 0.8882, "step": 11011 }, { "epoch": 88.096, "grad_norm": 40.88261413574219, "learning_rate": 6.662222222222222e-06, 
"loss": 0.8212, "step": 11012 }, { "epoch": 88.104, "grad_norm": 16.997831344604492, "learning_rate": 6.657777777777778e-06, "loss": 0.5583, "step": 11013 }, { "epoch": 88.112, "grad_norm": 23.201326370239258, "learning_rate": 6.653333333333334e-06, "loss": 0.6981, "step": 11014 }, { "epoch": 88.12, "grad_norm": 29.497438430786133, "learning_rate": 6.648888888888889e-06, "loss": 0.6589, "step": 11015 }, { "epoch": 88.128, "grad_norm": 32.892845153808594, "learning_rate": 6.644444444444445e-06, "loss": 1.0619, "step": 11016 }, { "epoch": 88.136, "grad_norm": 35.551849365234375, "learning_rate": 6.640000000000001e-06, "loss": 0.919, "step": 11017 }, { "epoch": 88.144, "grad_norm": 32.704078674316406, "learning_rate": 6.635555555555556e-06, "loss": 0.9717, "step": 11018 }, { "epoch": 88.152, "grad_norm": 48.10066604614258, "learning_rate": 6.631111111111111e-06, "loss": 1.6331, "step": 11019 }, { "epoch": 88.16, "grad_norm": 25.420011520385742, "learning_rate": 6.626666666666667e-06, "loss": 0.7687, "step": 11020 }, { "epoch": 88.168, "grad_norm": 52.75025939941406, "learning_rate": 6.622222222222223e-06, "loss": 0.6808, "step": 11021 }, { "epoch": 88.176, "grad_norm": 53.93169403076172, "learning_rate": 6.617777777777778e-06, "loss": 0.6594, "step": 11022 }, { "epoch": 88.184, "grad_norm": 23.355756759643555, "learning_rate": 6.613333333333334e-06, "loss": 0.7405, "step": 11023 }, { "epoch": 88.192, "grad_norm": 17.10260772705078, "learning_rate": 6.60888888888889e-06, "loss": 0.6268, "step": 11024 }, { "epoch": 88.2, "grad_norm": 17.043737411499023, "learning_rate": 6.604444444444445e-06, "loss": 0.8121, "step": 11025 }, { "epoch": 88.208, "grad_norm": 73.12310028076172, "learning_rate": 6.6e-06, "loss": 0.9938, "step": 11026 }, { "epoch": 88.216, "grad_norm": 29.305984497070312, "learning_rate": 6.595555555555556e-06, "loss": 1.2543, "step": 11027 }, { "epoch": 88.224, "grad_norm": 12.852109909057617, "learning_rate": 6.591111111111112e-06, "loss": 0.679, "step": 
11028 }, { "epoch": 88.232, "grad_norm": 13.76037883758545, "learning_rate": 6.586666666666667e-06, "loss": 1.1065, "step": 11029 }, { "epoch": 88.24, "grad_norm": 13.256189346313477, "learning_rate": 6.582222222222223e-06, "loss": 0.5902, "step": 11030 }, { "epoch": 88.248, "grad_norm": 19.581207275390625, "learning_rate": 6.577777777777779e-06, "loss": 0.6462, "step": 11031 }, { "epoch": 88.256, "grad_norm": 19.401460647583008, "learning_rate": 6.5733333333333345e-06, "loss": 0.6085, "step": 11032 }, { "epoch": 88.264, "grad_norm": 16.127988815307617, "learning_rate": 6.568888888888889e-06, "loss": 0.5029, "step": 11033 }, { "epoch": 88.272, "grad_norm": 14.511476516723633, "learning_rate": 6.564444444444445e-06, "loss": 0.5909, "step": 11034 }, { "epoch": 88.28, "grad_norm": 24.627826690673828, "learning_rate": 6.560000000000001e-06, "loss": 0.7805, "step": 11035 }, { "epoch": 88.288, "grad_norm": 21.595380783081055, "learning_rate": 6.555555555555556e-06, "loss": 0.5192, "step": 11036 }, { "epoch": 88.296, "grad_norm": 18.26093292236328, "learning_rate": 6.551111111111112e-06, "loss": 0.746, "step": 11037 }, { "epoch": 88.304, "grad_norm": 16.25975799560547, "learning_rate": 6.546666666666668e-06, "loss": 1.0292, "step": 11038 }, { "epoch": 88.312, "grad_norm": 31.77446174621582, "learning_rate": 6.5422222222222235e-06, "loss": 1.0192, "step": 11039 }, { "epoch": 88.32, "grad_norm": 24.027433395385742, "learning_rate": 6.537777777777779e-06, "loss": 0.9829, "step": 11040 }, { "epoch": 88.328, "grad_norm": 32.220184326171875, "learning_rate": 6.533333333333333e-06, "loss": 0.858, "step": 11041 }, { "epoch": 88.336, "grad_norm": 22.588809967041016, "learning_rate": 6.528888888888889e-06, "loss": 2.3882, "step": 11042 }, { "epoch": 88.344, "grad_norm": 27.482975006103516, "learning_rate": 6.524444444444444e-06, "loss": 0.8039, "step": 11043 }, { "epoch": 88.352, "grad_norm": 29.460575103759766, "learning_rate": 6.519999999999999e-06, "loss": 0.5797, "step": 11044 
}, { "epoch": 88.36, "grad_norm": 35.33230972290039, "learning_rate": 6.515555555555555e-06, "loss": 0.7677, "step": 11045 }, { "epoch": 88.368, "grad_norm": 21.10413360595703, "learning_rate": 6.511111111111111e-06, "loss": 1.0027, "step": 11046 }, { "epoch": 88.376, "grad_norm": 43.735374450683594, "learning_rate": 6.5066666666666665e-06, "loss": 0.9426, "step": 11047 }, { "epoch": 88.384, "grad_norm": 22.71929168701172, "learning_rate": 6.502222222222222e-06, "loss": 0.8398, "step": 11048 }, { "epoch": 88.392, "grad_norm": 26.312368392944336, "learning_rate": 6.497777777777778e-06, "loss": 0.6867, "step": 11049 }, { "epoch": 88.4, "grad_norm": 28.58025550842285, "learning_rate": 6.4933333333333336e-06, "loss": 0.9168, "step": 11050 }, { "epoch": 88.408, "grad_norm": 23.198684692382812, "learning_rate": 6.488888888888888e-06, "loss": 0.8324, "step": 11051 }, { "epoch": 88.416, "grad_norm": 26.176483154296875, "learning_rate": 6.484444444444444e-06, "loss": 0.8842, "step": 11052 }, { "epoch": 88.424, "grad_norm": 21.97970962524414, "learning_rate": 6.48e-06, "loss": 0.6488, "step": 11053 }, { "epoch": 88.432, "grad_norm": 18.52307891845703, "learning_rate": 6.4755555555555555e-06, "loss": 0.8604, "step": 11054 }, { "epoch": 88.44, "grad_norm": 22.34872817993164, "learning_rate": 6.471111111111111e-06, "loss": 0.6603, "step": 11055 }, { "epoch": 88.448, "grad_norm": 114.0359878540039, "learning_rate": 6.466666666666667e-06, "loss": 0.8199, "step": 11056 }, { "epoch": 88.456, "grad_norm": 23.810701370239258, "learning_rate": 6.4622222222222226e-06, "loss": 1.0418, "step": 11057 }, { "epoch": 88.464, "grad_norm": 80.62480926513672, "learning_rate": 6.457777777777778e-06, "loss": 0.9635, "step": 11058 }, { "epoch": 88.472, "grad_norm": 19.44249153137207, "learning_rate": 6.453333333333333e-06, "loss": 0.7662, "step": 11059 }, { "epoch": 88.48, "grad_norm": 28.769426345825195, "learning_rate": 6.448888888888889e-06, "loss": 1.4739, "step": 11060 }, { "epoch": 88.488, 
"grad_norm": 14.94253921508789, "learning_rate": 6.4444444444444445e-06, "loss": 0.6699, "step": 11061 }, { "epoch": 88.496, "grad_norm": 23.245258331298828, "learning_rate": 6.44e-06, "loss": 0.6472, "step": 11062 }, { "epoch": 88.504, "grad_norm": 30.620250701904297, "learning_rate": 6.435555555555556e-06, "loss": 0.7533, "step": 11063 }, { "epoch": 88.512, "grad_norm": 17.5978946685791, "learning_rate": 6.4311111111111116e-06, "loss": 0.822, "step": 11064 }, { "epoch": 88.52, "grad_norm": 14.096466064453125, "learning_rate": 6.426666666666667e-06, "loss": 1.0727, "step": 11065 }, { "epoch": 88.528, "grad_norm": 22.171859741210938, "learning_rate": 6.422222222222223e-06, "loss": 0.7271, "step": 11066 }, { "epoch": 88.536, "grad_norm": 34.726070404052734, "learning_rate": 6.417777777777778e-06, "loss": 0.841, "step": 11067 }, { "epoch": 88.544, "grad_norm": 31.342050552368164, "learning_rate": 6.4133333333333335e-06, "loss": 1.0249, "step": 11068 }, { "epoch": 88.552, "grad_norm": 37.690406799316406, "learning_rate": 6.408888888888889e-06, "loss": 0.6264, "step": 11069 }, { "epoch": 88.56, "grad_norm": 30.22361183166504, "learning_rate": 6.404444444444445e-06, "loss": 0.9086, "step": 11070 }, { "epoch": 88.568, "grad_norm": 22.004344940185547, "learning_rate": 6.4000000000000006e-06, "loss": 1.2613, "step": 11071 }, { "epoch": 88.576, "grad_norm": 63.792667388916016, "learning_rate": 6.395555555555556e-06, "loss": 0.8815, "step": 11072 }, { "epoch": 88.584, "grad_norm": 19.712646484375, "learning_rate": 6.391111111111112e-06, "loss": 0.8777, "step": 11073 }, { "epoch": 88.592, "grad_norm": 18.31410026550293, "learning_rate": 6.386666666666667e-06, "loss": 1.0404, "step": 11074 }, { "epoch": 88.6, "grad_norm": 17.097299575805664, "learning_rate": 6.3822222222222225e-06, "loss": 0.6676, "step": 11075 }, { "epoch": 88.608, "grad_norm": 30.186080932617188, "learning_rate": 6.377777777777778e-06, "loss": 1.0128, "step": 11076 }, { "epoch": 88.616, "grad_norm": 
50.45661163330078, "learning_rate": 6.373333333333334e-06, "loss": 0.9826, "step": 11077 }, { "epoch": 88.624, "grad_norm": 77.76384735107422, "learning_rate": 6.3688888888888896e-06, "loss": 1.8978, "step": 11078 }, { "epoch": 88.632, "grad_norm": 12.425677299499512, "learning_rate": 6.364444444444445e-06, "loss": 0.4632, "step": 11079 }, { "epoch": 88.64, "grad_norm": 19.25543212890625, "learning_rate": 6.360000000000001e-06, "loss": 0.7565, "step": 11080 }, { "epoch": 88.648, "grad_norm": 24.663991928100586, "learning_rate": 6.355555555555557e-06, "loss": 0.8766, "step": 11081 }, { "epoch": 88.656, "grad_norm": 50.330177307128906, "learning_rate": 6.3511111111111115e-06, "loss": 0.6653, "step": 11082 }, { "epoch": 88.664, "grad_norm": 24.11724281311035, "learning_rate": 6.346666666666667e-06, "loss": 0.6678, "step": 11083 }, { "epoch": 88.672, "grad_norm": 18.66835594177246, "learning_rate": 6.342222222222223e-06, "loss": 0.9408, "step": 11084 }, { "epoch": 88.68, "grad_norm": 47.689491271972656, "learning_rate": 6.3377777777777786e-06, "loss": 1.1228, "step": 11085 }, { "epoch": 88.688, "grad_norm": 32.48615646362305, "learning_rate": 6.333333333333334e-06, "loss": 0.7382, "step": 11086 }, { "epoch": 88.696, "grad_norm": 22.860971450805664, "learning_rate": 6.32888888888889e-06, "loss": 0.3715, "step": 11087 }, { "epoch": 88.704, "grad_norm": 34.26902389526367, "learning_rate": 6.324444444444446e-06, "loss": 0.6873, "step": 11088 }, { "epoch": 88.712, "grad_norm": 27.899206161499023, "learning_rate": 6.320000000000001e-06, "loss": 1.1823, "step": 11089 }, { "epoch": 88.72, "grad_norm": 26.883010864257812, "learning_rate": 6.315555555555556e-06, "loss": 0.6429, "step": 11090 }, { "epoch": 88.728, "grad_norm": 40.39302444458008, "learning_rate": 6.311111111111112e-06, "loss": 1.2785, "step": 11091 }, { "epoch": 88.736, "grad_norm": 23.659616470336914, "learning_rate": 6.306666666666666e-06, "loss": 0.6297, "step": 11092 }, { "epoch": 88.744, "grad_norm": 
12.172417640686035, "learning_rate": 6.3022222222222216e-06, "loss": 0.9561, "step": 11093 }, { "epoch": 88.752, "grad_norm": 13.250406265258789, "learning_rate": 6.297777777777777e-06, "loss": 0.8081, "step": 11094 }, { "epoch": 88.76, "grad_norm": 46.28351974487305, "learning_rate": 6.293333333333333e-06, "loss": 1.0479, "step": 11095 }, { "epoch": 88.768, "grad_norm": 50.668121337890625, "learning_rate": 6.288888888888889e-06, "loss": 0.8304, "step": 11096 }, { "epoch": 88.776, "grad_norm": 13.132316589355469, "learning_rate": 6.284444444444444e-06, "loss": 0.9886, "step": 11097 }, { "epoch": 88.784, "grad_norm": 17.270639419555664, "learning_rate": 6.28e-06, "loss": 0.8119, "step": 11098 }, { "epoch": 88.792, "grad_norm": 15.267468452453613, "learning_rate": 6.275555555555556e-06, "loss": 0.588, "step": 11099 }, { "epoch": 88.8, "grad_norm": 27.815170288085938, "learning_rate": 6.2711111111111105e-06, "loss": 0.6082, "step": 11100 }, { "epoch": 88.808, "grad_norm": 17.258752822875977, "learning_rate": 6.266666666666666e-06, "loss": 0.5956, "step": 11101 }, { "epoch": 88.816, "grad_norm": 26.372255325317383, "learning_rate": 6.262222222222222e-06, "loss": 0.822, "step": 11102 }, { "epoch": 88.824, "grad_norm": 26.22181510925293, "learning_rate": 6.257777777777778e-06, "loss": 0.8795, "step": 11103 }, { "epoch": 88.832, "grad_norm": 27.948516845703125, "learning_rate": 6.253333333333333e-06, "loss": 0.5961, "step": 11104 }, { "epoch": 88.84, "grad_norm": 215.1540985107422, "learning_rate": 6.248888888888889e-06, "loss": 3.0549, "step": 11105 }, { "epoch": 88.848, "grad_norm": 22.801706314086914, "learning_rate": 6.244444444444445e-06, "loss": 0.5759, "step": 11106 }, { "epoch": 88.856, "grad_norm": 21.527812957763672, "learning_rate": 6.24e-06, "loss": 0.6568, "step": 11107 }, { "epoch": 88.864, "grad_norm": 13.503138542175293, "learning_rate": 6.235555555555555e-06, "loss": 0.6195, "step": 11108 }, { "epoch": 88.872, "grad_norm": 48.93589782714844, 
"learning_rate": 6.231111111111111e-06, "loss": 0.7803, "step": 11109 }, { "epoch": 88.88, "grad_norm": 44.31671905517578, "learning_rate": 6.226666666666667e-06, "loss": 0.8326, "step": 11110 }, { "epoch": 88.888, "grad_norm": 18.64962387084961, "learning_rate": 6.222222222222222e-06, "loss": 0.9478, "step": 11111 }, { "epoch": 88.896, "grad_norm": 20.395851135253906, "learning_rate": 6.217777777777778e-06, "loss": 0.7264, "step": 11112 }, { "epoch": 88.904, "grad_norm": 26.28460121154785, "learning_rate": 6.213333333333334e-06, "loss": 0.4815, "step": 11113 }, { "epoch": 88.912, "grad_norm": 24.437780380249023, "learning_rate": 6.208888888888889e-06, "loss": 0.8518, "step": 11114 }, { "epoch": 88.92, "grad_norm": 12.676926612854004, "learning_rate": 6.204444444444445e-06, "loss": 0.8974, "step": 11115 }, { "epoch": 88.928, "grad_norm": 44.170658111572266, "learning_rate": 6.2e-06, "loss": 0.7352, "step": 11116 }, { "epoch": 88.936, "grad_norm": 14.683019638061523, "learning_rate": 6.195555555555556e-06, "loss": 0.5646, "step": 11117 }, { "epoch": 88.944, "grad_norm": 20.907590866088867, "learning_rate": 6.191111111111111e-06, "loss": 0.7654, "step": 11118 }, { "epoch": 88.952, "grad_norm": 56.82463073730469, "learning_rate": 6.186666666666667e-06, "loss": 1.0386, "step": 11119 }, { "epoch": 88.96, "grad_norm": 36.69746398925781, "learning_rate": 6.182222222222223e-06, "loss": 0.6283, "step": 11120 }, { "epoch": 88.968, "grad_norm": 33.913734436035156, "learning_rate": 6.177777777777778e-06, "loss": 0.5538, "step": 11121 }, { "epoch": 88.976, "grad_norm": 18.159242630004883, "learning_rate": 6.173333333333334e-06, "loss": 0.4568, "step": 11122 }, { "epoch": 88.984, "grad_norm": 17.64800453186035, "learning_rate": 6.168888888888889e-06, "loss": 0.8555, "step": 11123 }, { "epoch": 88.992, "grad_norm": 24.48422622680664, "learning_rate": 6.164444444444445e-06, "loss": 1.0684, "step": 11124 }, { "epoch": 89.0, "grad_norm": 23.16597557067871, "learning_rate": 6.16e-06, 
"loss": 0.9056, "step": 11125 }, { "epoch": 89.0, "eval_loss": 0.9050135016441345, "eval_map": 0.4738, "eval_map_50": 0.8071, "eval_map_75": 0.4799, "eval_map_Coverall": 0.7038, "eval_map_Face_Shield": 0.57, "eval_map_Gloves": 0.395, "eval_map_Goggles": 0.257, "eval_map_Mask": 0.4432, "eval_map_large": 0.6758, "eval_map_medium": 0.3478, "eval_map_small": 0.4096, "eval_mar_1": 0.3549, "eval_mar_10": 0.5996, "eval_mar_100": 0.6101, "eval_mar_100_Coverall": 0.7756, "eval_mar_100_Face_Shield": 0.7118, "eval_mar_100_Gloves": 0.5098, "eval_mar_100_Goggles": 0.5188, "eval_mar_100_Mask": 0.5346, "eval_mar_large": 0.7941, "eval_mar_medium": 0.5253, "eval_mar_small": 0.4705, "eval_runtime": 0.9079, "eval_samples_per_second": 31.941, "eval_steps_per_second": 2.203, "step": 11125 }, { "epoch": 89.008, "grad_norm": 20.863718032836914, "learning_rate": 6.155555555555556e-06, "loss": 0.9692, "step": 11126 }, { "epoch": 89.016, "grad_norm": 33.57597732543945, "learning_rate": 6.151111111111112e-06, "loss": 1.9005, "step": 11127 }, { "epoch": 89.024, "grad_norm": 15.889655113220215, "learning_rate": 6.146666666666667e-06, "loss": 0.805, "step": 11128 }, { "epoch": 89.032, "grad_norm": 25.77906036376953, "learning_rate": 6.142222222222223e-06, "loss": 0.975, "step": 11129 }, { "epoch": 89.04, "grad_norm": 41.77212905883789, "learning_rate": 6.137777777777779e-06, "loss": 0.9661, "step": 11130 }, { "epoch": 89.048, "grad_norm": 123.79524993896484, "learning_rate": 6.133333333333334e-06, "loss": 0.5705, "step": 11131 }, { "epoch": 89.056, "grad_norm": 32.46966552734375, "learning_rate": 6.1288888888888885e-06, "loss": 0.6425, "step": 11132 }, { "epoch": 89.064, "grad_norm": 30.383934020996094, "learning_rate": 6.124444444444444e-06, "loss": 0.5121, "step": 11133 }, { "epoch": 89.072, "grad_norm": 13.397007942199707, "learning_rate": 6.12e-06, "loss": 0.9176, "step": 11134 }, { "epoch": 89.08, "grad_norm": 18.138105392456055, "learning_rate": 6.1155555555555555e-06, "loss": 0.6814, 
"step": 11135 }, { "epoch": 89.088, "grad_norm": 35.745941162109375, "learning_rate": 6.111111111111111e-06, "loss": 1.0004, "step": 11136 }, { "epoch": 89.096, "grad_norm": 34.10578536987305, "learning_rate": 6.106666666666667e-06, "loss": 0.4886, "step": 11137 }, { "epoch": 89.104, "grad_norm": 26.590858459472656, "learning_rate": 6.102222222222223e-06, "loss": 0.7442, "step": 11138 }, { "epoch": 89.112, "grad_norm": 19.208465576171875, "learning_rate": 6.097777777777778e-06, "loss": 0.6269, "step": 11139 }, { "epoch": 89.12, "grad_norm": 20.72344207763672, "learning_rate": 6.093333333333333e-06, "loss": 0.6763, "step": 11140 }, { "epoch": 89.128, "grad_norm": 14.64101505279541, "learning_rate": 6.088888888888889e-06, "loss": 0.8185, "step": 11141 }, { "epoch": 89.136, "grad_norm": 76.94631958007812, "learning_rate": 6.0844444444444445e-06, "loss": 0.8588, "step": 11142 }, { "epoch": 89.144, "grad_norm": 87.04525756835938, "learning_rate": 6.08e-06, "loss": 1.1154, "step": 11143 }, { "epoch": 89.152, "grad_norm": 15.41073226928711, "learning_rate": 6.075555555555556e-06, "loss": 0.7813, "step": 11144 }, { "epoch": 89.16, "grad_norm": 17.76821517944336, "learning_rate": 6.071111111111112e-06, "loss": 0.8081, "step": 11145 }, { "epoch": 89.168, "grad_norm": 28.553293228149414, "learning_rate": 6.066666666666667e-06, "loss": 1.1189, "step": 11146 }, { "epoch": 89.176, "grad_norm": 19.642595291137695, "learning_rate": 6.062222222222223e-06, "loss": 0.8724, "step": 11147 }, { "epoch": 89.184, "grad_norm": 23.00849723815918, "learning_rate": 6.057777777777778e-06, "loss": 0.9221, "step": 11148 }, { "epoch": 89.192, "grad_norm": 25.041154861450195, "learning_rate": 6.0533333333333335e-06, "loss": 0.8276, "step": 11149 }, { "epoch": 89.2, "grad_norm": 20.992958068847656, "learning_rate": 6.048888888888889e-06, "loss": 0.956, "step": 11150 }, { "epoch": 89.208, "grad_norm": 64.17286682128906, "learning_rate": 6.044444444444445e-06, "loss": 0.994, "step": 11151 }, { 
"epoch": 89.216, "grad_norm": 15.826366424560547, "learning_rate": 6.040000000000001e-06, "loss": 0.5654, "step": 11152 }, { "epoch": 89.224, "grad_norm": 22.464025497436523, "learning_rate": 6.035555555555556e-06, "loss": 1.0257, "step": 11153 }, { "epoch": 89.232, "grad_norm": 26.501611709594727, "learning_rate": 6.031111111111112e-06, "loss": 0.5933, "step": 11154 }, { "epoch": 89.24, "grad_norm": 35.70372009277344, "learning_rate": 6.026666666666667e-06, "loss": 0.7976, "step": 11155 }, { "epoch": 89.248, "grad_norm": 19.056026458740234, "learning_rate": 6.0222222222222225e-06, "loss": 0.6376, "step": 11156 }, { "epoch": 89.256, "grad_norm": 22.07050895690918, "learning_rate": 6.017777777777777e-06, "loss": 0.8083, "step": 11157 }, { "epoch": 89.264, "grad_norm": 48.26186752319336, "learning_rate": 6.013333333333333e-06, "loss": 0.731, "step": 11158 }, { "epoch": 89.272, "grad_norm": 27.223072052001953, "learning_rate": 6.008888888888889e-06, "loss": 0.8461, "step": 11159 }, { "epoch": 89.28, "grad_norm": 22.647218704223633, "learning_rate": 6.0044444444444445e-06, "loss": 0.7316, "step": 11160 }, { "epoch": 89.288, "grad_norm": 16.941251754760742, "learning_rate": 6e-06, "loss": 0.933, "step": 11161 }, { "epoch": 89.296, "grad_norm": 23.68075180053711, "learning_rate": 5.995555555555556e-06, "loss": 1.6474, "step": 11162 }, { "epoch": 89.304, "grad_norm": 21.005094528198242, "learning_rate": 5.9911111111111115e-06, "loss": 0.8752, "step": 11163 }, { "epoch": 89.312, "grad_norm": 22.438844680786133, "learning_rate": 5.986666666666667e-06, "loss": 0.8322, "step": 11164 }, { "epoch": 89.32, "grad_norm": 21.157100677490234, "learning_rate": 5.982222222222222e-06, "loss": 0.6551, "step": 11165 }, { "epoch": 89.328, "grad_norm": 22.924903869628906, "learning_rate": 5.977777777777778e-06, "loss": 0.8194, "step": 11166 }, { "epoch": 89.336, "grad_norm": 22.881263732910156, "learning_rate": 5.9733333333333335e-06, "loss": 0.4885, "step": 11167 }, { "epoch": 89.344, 
"grad_norm": 25.833696365356445, "learning_rate": 5.968888888888889e-06, "loss": 0.6476, "step": 11168 }, { "epoch": 89.352, "grad_norm": 24.409408569335938, "learning_rate": 5.964444444444445e-06, "loss": 0.9463, "step": 11169 }, { "epoch": 89.36, "grad_norm": 36.55443572998047, "learning_rate": 5.9600000000000005e-06, "loss": 1.5948, "step": 11170 }, { "epoch": 89.368, "grad_norm": 40.023433685302734, "learning_rate": 5.955555555555556e-06, "loss": 0.888, "step": 11171 }, { "epoch": 89.376, "grad_norm": 13.665249824523926, "learning_rate": 5.951111111111111e-06, "loss": 1.1595, "step": 11172 }, { "epoch": 89.384, "grad_norm": 28.041526794433594, "learning_rate": 5.946666666666667e-06, "loss": 0.9262, "step": 11173 }, { "epoch": 89.392, "grad_norm": 31.896596908569336, "learning_rate": 5.9422222222222225e-06, "loss": 0.6714, "step": 11174 }, { "epoch": 89.4, "grad_norm": 26.047103881835938, "learning_rate": 5.937777777777778e-06, "loss": 0.8643, "step": 11175 }, { "epoch": 89.408, "grad_norm": 23.32780647277832, "learning_rate": 5.933333333333334e-06, "loss": 0.9889, "step": 11176 }, { "epoch": 89.416, "grad_norm": 63.99555969238281, "learning_rate": 5.9288888888888895e-06, "loss": 0.8109, "step": 11177 }, { "epoch": 89.424, "grad_norm": 20.841543197631836, "learning_rate": 5.924444444444445e-06, "loss": 0.6843, "step": 11178 }, { "epoch": 89.432, "grad_norm": 16.330642700195312, "learning_rate": 5.920000000000001e-06, "loss": 0.6269, "step": 11179 }, { "epoch": 89.44, "grad_norm": 21.118057250976562, "learning_rate": 5.915555555555556e-06, "loss": 0.6751, "step": 11180 }, { "epoch": 89.448, "grad_norm": 37.28536605834961, "learning_rate": 5.9111111111111115e-06, "loss": 0.8627, "step": 11181 }, { "epoch": 89.456, "grad_norm": 25.161285400390625, "learning_rate": 5.906666666666667e-06, "loss": 0.5913, "step": 11182 }, { "epoch": 89.464, "grad_norm": 56.21922302246094, "learning_rate": 5.902222222222222e-06, "loss": 0.9523, "step": 11183 }, { "epoch": 89.472, 
"grad_norm": 18.953929901123047, "learning_rate": 5.897777777777778e-06, "loss": 0.667, "step": 11184 }, { "epoch": 89.48, "grad_norm": 24.119239807128906, "learning_rate": 5.893333333333333e-06, "loss": 0.846, "step": 11185 }, { "epoch": 89.488, "grad_norm": 34.886131286621094, "learning_rate": 5.888888888888889e-06, "loss": 0.8568, "step": 11186 }, { "epoch": 89.496, "grad_norm": 13.662029266357422, "learning_rate": 5.884444444444445e-06, "loss": 0.5776, "step": 11187 }, { "epoch": 89.504, "grad_norm": 22.198768615722656, "learning_rate": 5.8800000000000005e-06, "loss": 0.7482, "step": 11188 }, { "epoch": 89.512, "grad_norm": 84.9611587524414, "learning_rate": 5.875555555555555e-06, "loss": 1.9103, "step": 11189 }, { "epoch": 89.52, "grad_norm": 21.286678314208984, "learning_rate": 5.871111111111111e-06, "loss": 1.0662, "step": 11190 }, { "epoch": 89.528, "grad_norm": 16.044090270996094, "learning_rate": 5.866666666666667e-06, "loss": 0.9777, "step": 11191 }, { "epoch": 89.536, "grad_norm": 14.2781982421875, "learning_rate": 5.862222222222222e-06, "loss": 0.7023, "step": 11192 }, { "epoch": 89.544, "grad_norm": 18.846946716308594, "learning_rate": 5.857777777777778e-06, "loss": 0.7899, "step": 11193 }, { "epoch": 89.552, "grad_norm": 11.076783180236816, "learning_rate": 5.853333333333334e-06, "loss": 0.7086, "step": 11194 }, { "epoch": 89.56, "grad_norm": 28.76575469970703, "learning_rate": 5.8488888888888895e-06, "loss": 0.7357, "step": 11195 }, { "epoch": 89.568, "grad_norm": 18.6406307220459, "learning_rate": 5.844444444444445e-06, "loss": 0.7746, "step": 11196 }, { "epoch": 89.576, "grad_norm": 49.75046157836914, "learning_rate": 5.84e-06, "loss": 0.9105, "step": 11197 }, { "epoch": 89.584, "grad_norm": 18.089698791503906, "learning_rate": 5.835555555555556e-06, "loss": 0.8971, "step": 11198 }, { "epoch": 89.592, "grad_norm": 23.79015350341797, "learning_rate": 5.831111111111111e-06, "loss": 0.7889, "step": 11199 }, { "epoch": 89.6, "grad_norm": 
22.44043731689453, "learning_rate": 5.826666666666667e-06, "loss": 0.729, "step": 11200 }, { "epoch": 89.608, "grad_norm": 25.065290451049805, "learning_rate": 5.822222222222223e-06, "loss": 0.9039, "step": 11201 }, { "epoch": 89.616, "grad_norm": 28.685626983642578, "learning_rate": 5.8177777777777785e-06, "loss": 0.9904, "step": 11202 }, { "epoch": 89.624, "grad_norm": 44.234561920166016, "learning_rate": 5.813333333333334e-06, "loss": 0.6856, "step": 11203 }, { "epoch": 89.632, "grad_norm": 19.159921646118164, "learning_rate": 5.808888888888889e-06, "loss": 0.6921, "step": 11204 }, { "epoch": 89.64, "grad_norm": 20.698678970336914, "learning_rate": 5.804444444444445e-06, "loss": 0.7747, "step": 11205 }, { "epoch": 89.648, "grad_norm": 24.102737426757812, "learning_rate": 5.8e-06, "loss": 0.8286, "step": 11206 }, { "epoch": 89.656, "grad_norm": 26.045949935913086, "learning_rate": 5.795555555555556e-06, "loss": 1.0712, "step": 11207 }, { "epoch": 89.664, "grad_norm": 248.75201416015625, "learning_rate": 5.791111111111111e-06, "loss": 0.7682, "step": 11208 }, { "epoch": 89.672, "grad_norm": 22.626934051513672, "learning_rate": 5.786666666666667e-06, "loss": 0.6856, "step": 11209 }, { "epoch": 89.68, "grad_norm": 15.33563232421875, "learning_rate": 5.782222222222222e-06, "loss": 1.1558, "step": 11210 }, { "epoch": 89.688, "grad_norm": 57.1354866027832, "learning_rate": 5.777777777777778e-06, "loss": 0.8443, "step": 11211 }, { "epoch": 89.696, "grad_norm": 21.09770393371582, "learning_rate": 5.773333333333334e-06, "loss": 0.6548, "step": 11212 }, { "epoch": 89.704, "grad_norm": 23.38336753845215, "learning_rate": 5.768888888888889e-06, "loss": 0.704, "step": 11213 }, { "epoch": 89.712, "grad_norm": 80.67179107666016, "learning_rate": 5.764444444444444e-06, "loss": 0.7821, "step": 11214 }, { "epoch": 89.72, "grad_norm": 36.85899353027344, "learning_rate": 5.76e-06, "loss": 0.9008, "step": 11215 }, { "epoch": 89.728, "grad_norm": 30.650949478149414, "learning_rate": 
5.755555555555556e-06, "loss": 1.2435, "step": 11216 }, { "epoch": 89.736, "grad_norm": 30.440122604370117, "learning_rate": 5.751111111111111e-06, "loss": 0.7318, "step": 11217 }, { "epoch": 89.744, "grad_norm": 21.794492721557617, "learning_rate": 5.746666666666667e-06, "loss": 1.0119, "step": 11218 }, { "epoch": 89.752, "grad_norm": 23.956459045410156, "learning_rate": 5.742222222222223e-06, "loss": 0.5465, "step": 11219 }, { "epoch": 89.76, "grad_norm": 20.124319076538086, "learning_rate": 5.737777777777778e-06, "loss": 0.6528, "step": 11220 }, { "epoch": 89.768, "grad_norm": 21.8857364654541, "learning_rate": 5.733333333333333e-06, "loss": 1.1407, "step": 11221 }, { "epoch": 89.776, "grad_norm": 34.80328369140625, "learning_rate": 5.728888888888889e-06, "loss": 0.7121, "step": 11222 }, { "epoch": 89.784, "grad_norm": 18.266357421875, "learning_rate": 5.724444444444445e-06, "loss": 0.9983, "step": 11223 }, { "epoch": 89.792, "grad_norm": 20.677453994750977, "learning_rate": 5.72e-06, "loss": 0.6652, "step": 11224 }, { "epoch": 89.8, "grad_norm": 52.10688781738281, "learning_rate": 5.715555555555556e-06, "loss": 0.6362, "step": 11225 }, { "epoch": 89.808, "grad_norm": 30.259246826171875, "learning_rate": 5.711111111111112e-06, "loss": 0.6394, "step": 11226 }, { "epoch": 89.816, "grad_norm": 15.610440254211426, "learning_rate": 5.706666666666667e-06, "loss": 0.8863, "step": 11227 }, { "epoch": 89.824, "grad_norm": 12.124263763427734, "learning_rate": 5.702222222222223e-06, "loss": 0.5303, "step": 11228 }, { "epoch": 89.832, "grad_norm": 17.520496368408203, "learning_rate": 5.697777777777778e-06, "loss": 0.5829, "step": 11229 }, { "epoch": 89.84, "grad_norm": 14.4632568359375, "learning_rate": 5.693333333333334e-06, "loss": 0.6546, "step": 11230 }, { "epoch": 89.848, "grad_norm": 35.443267822265625, "learning_rate": 5.688888888888889e-06, "loss": 1.0699, "step": 11231 }, { "epoch": 89.856, "grad_norm": 17.844593048095703, "learning_rate": 5.684444444444445e-06, 
"loss": 0.8341, "step": 11232 }, { "epoch": 89.864, "grad_norm": 38.178504943847656, "learning_rate": 5.680000000000001e-06, "loss": 0.5956, "step": 11233 }, { "epoch": 89.872, "grad_norm": 26.720792770385742, "learning_rate": 5.6755555555555555e-06, "loss": 0.5291, "step": 11234 }, { "epoch": 89.88, "grad_norm": 25.355335235595703, "learning_rate": 5.671111111111111e-06, "loss": 0.7892, "step": 11235 }, { "epoch": 89.888, "grad_norm": 16.708621978759766, "learning_rate": 5.666666666666667e-06, "loss": 0.8616, "step": 11236 }, { "epoch": 89.896, "grad_norm": 12.839332580566406, "learning_rate": 5.662222222222223e-06, "loss": 0.7186, "step": 11237 }, { "epoch": 89.904, "grad_norm": 26.960519790649414, "learning_rate": 5.6577777777777774e-06, "loss": 0.8434, "step": 11238 }, { "epoch": 89.912, "grad_norm": 21.66694450378418, "learning_rate": 5.653333333333333e-06, "loss": 1.0692, "step": 11239 }, { "epoch": 89.92, "grad_norm": 48.14466094970703, "learning_rate": 5.648888888888889e-06, "loss": 0.5534, "step": 11240 }, { "epoch": 89.928, "grad_norm": 51.57549285888672, "learning_rate": 5.6444444444444445e-06, "loss": 0.7673, "step": 11241 }, { "epoch": 89.936, "grad_norm": 71.47799682617188, "learning_rate": 5.64e-06, "loss": 2.5795, "step": 11242 }, { "epoch": 89.944, "grad_norm": 14.145790100097656, "learning_rate": 5.635555555555556e-06, "loss": 0.7186, "step": 11243 }, { "epoch": 89.952, "grad_norm": 27.529626846313477, "learning_rate": 5.631111111111112e-06, "loss": 1.216, "step": 11244 }, { "epoch": 89.96, "grad_norm": 15.359943389892578, "learning_rate": 5.626666666666667e-06, "loss": 0.7908, "step": 11245 }, { "epoch": 89.968, "grad_norm": 29.74968147277832, "learning_rate": 5.622222222222222e-06, "loss": 1.1507, "step": 11246 }, { "epoch": 89.976, "grad_norm": 30.03458595275879, "learning_rate": 5.617777777777778e-06, "loss": 0.5798, "step": 11247 }, { "epoch": 89.984, "grad_norm": 18.33134651184082, "learning_rate": 5.6133333333333335e-06, "loss": 0.7228, 
"step": 11248 }, { "epoch": 89.992, "grad_norm": 36.8171272277832, "learning_rate": 5.608888888888889e-06, "loss": 0.867, "step": 11249 }, { "epoch": 90.0, "grad_norm": 37.756839752197266, "learning_rate": 5.604444444444445e-06, "loss": 1.5538, "step": 11250 }, { "epoch": 90.0, "eval_loss": 0.900014340877533, "eval_map": 0.4857, "eval_map_50": 0.8222, "eval_map_75": 0.5258, "eval_map_Coverall": 0.687, "eval_map_Face_Shield": 0.6256, "eval_map_Gloves": 0.3978, "eval_map_Goggles": 0.302, "eval_map_Mask": 0.4161, "eval_map_large": 0.68, "eval_map_medium": 0.3784, "eval_map_small": 0.4271, "eval_mar_1": 0.3617, "eval_mar_10": 0.5984, "eval_mar_100": 0.6067, "eval_mar_100_Coverall": 0.7733, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.5131, "eval_mar_100_Goggles": 0.5219, "eval_mar_100_Mask": 0.5077, "eval_mar_large": 0.7835, "eval_mar_medium": 0.534, "eval_mar_small": 0.492, "eval_runtime": 0.9274, "eval_samples_per_second": 31.271, "eval_steps_per_second": 2.157, "step": 11250 }, { "epoch": 90.008, "grad_norm": 31.6571102142334, "learning_rate": 5.600000000000001e-06, "loss": 0.8117, "step": 11251 }, { "epoch": 90.016, "grad_norm": 33.678470611572266, "learning_rate": 5.595555555555556e-06, "loss": 1.0049, "step": 11252 }, { "epoch": 90.024, "grad_norm": 21.764087677001953, "learning_rate": 5.591111111111111e-06, "loss": 0.8288, "step": 11253 }, { "epoch": 90.032, "grad_norm": 27.667068481445312, "learning_rate": 5.586666666666667e-06, "loss": 0.9308, "step": 11254 }, { "epoch": 90.04, "grad_norm": 36.8610954284668, "learning_rate": 5.5822222222222225e-06, "loss": 0.7896, "step": 11255 }, { "epoch": 90.048, "grad_norm": 40.64298629760742, "learning_rate": 5.577777777777778e-06, "loss": 0.6225, "step": 11256 }, { "epoch": 90.056, "grad_norm": 24.447444915771484, "learning_rate": 5.573333333333334e-06, "loss": 0.6669, "step": 11257 }, { "epoch": 90.064, "grad_norm": 14.95251750946045, "learning_rate": 5.56888888888889e-06, "loss": 0.7482, "step": 11258 
}, { "epoch": 90.072, "grad_norm": 47.42503356933594, "learning_rate": 5.564444444444445e-06, "loss": 0.812, "step": 11259 }, { "epoch": 90.08, "grad_norm": 43.076961517333984, "learning_rate": 5.56e-06, "loss": 0.5068, "step": 11260 }, { "epoch": 90.088, "grad_norm": 29.5488338470459, "learning_rate": 5.555555555555556e-06, "loss": 0.6864, "step": 11261 }, { "epoch": 90.096, "grad_norm": 17.779647827148438, "learning_rate": 5.5511111111111115e-06, "loss": 0.6784, "step": 11262 }, { "epoch": 90.104, "grad_norm": 25.638296127319336, "learning_rate": 5.546666666666666e-06, "loss": 0.6954, "step": 11263 }, { "epoch": 90.112, "grad_norm": 32.379119873046875, "learning_rate": 5.542222222222222e-06, "loss": 0.8695, "step": 11264 }, { "epoch": 90.12, "grad_norm": 33.915584564208984, "learning_rate": 5.537777777777778e-06, "loss": 0.9506, "step": 11265 }, { "epoch": 90.128, "grad_norm": 22.561019897460938, "learning_rate": 5.5333333333333334e-06, "loss": 0.5376, "step": 11266 }, { "epoch": 90.136, "grad_norm": 20.558094024658203, "learning_rate": 5.528888888888889e-06, "loss": 0.7175, "step": 11267 }, { "epoch": 90.144, "grad_norm": 20.037752151489258, "learning_rate": 5.524444444444445e-06, "loss": 0.7725, "step": 11268 }, { "epoch": 90.152, "grad_norm": 20.367599487304688, "learning_rate": 5.5200000000000005e-06, "loss": 0.4851, "step": 11269 }, { "epoch": 90.16, "grad_norm": 14.66450309753418, "learning_rate": 5.515555555555555e-06, "loss": 0.7567, "step": 11270 }, { "epoch": 90.168, "grad_norm": 14.465580940246582, "learning_rate": 5.511111111111111e-06, "loss": 0.9925, "step": 11271 }, { "epoch": 90.176, "grad_norm": 22.826536178588867, "learning_rate": 5.506666666666667e-06, "loss": 0.5654, "step": 11272 }, { "epoch": 90.184, "grad_norm": 37.25182342529297, "learning_rate": 5.5022222222222224e-06, "loss": 0.7285, "step": 11273 }, { "epoch": 90.192, "grad_norm": 19.658538818359375, "learning_rate": 5.497777777777778e-06, "loss": 1.1247, "step": 11274 }, { "epoch": 
90.2, "grad_norm": 90.92622375488281, "learning_rate": 5.493333333333334e-06, "loss": 0.8678, "step": 11275 }, { "epoch": 90.208, "grad_norm": 15.36094856262207, "learning_rate": 5.4888888888888895e-06, "loss": 1.0033, "step": 11276 }, { "epoch": 90.216, "grad_norm": 73.30670928955078, "learning_rate": 5.484444444444445e-06, "loss": 0.7433, "step": 11277 }, { "epoch": 90.224, "grad_norm": 71.58624267578125, "learning_rate": 5.48e-06, "loss": 1.1398, "step": 11278 }, { "epoch": 90.232, "grad_norm": 24.001060485839844, "learning_rate": 5.475555555555556e-06, "loss": 1.0535, "step": 11279 }, { "epoch": 90.24, "grad_norm": 11.891676902770996, "learning_rate": 5.4711111111111114e-06, "loss": 0.7185, "step": 11280 }, { "epoch": 90.248, "grad_norm": 280.67340087890625, "learning_rate": 5.466666666666667e-06, "loss": 0.7215, "step": 11281 }, { "epoch": 90.256, "grad_norm": 19.548307418823242, "learning_rate": 5.462222222222223e-06, "loss": 1.0161, "step": 11282 }, { "epoch": 90.264, "grad_norm": 14.288290023803711, "learning_rate": 5.4577777777777785e-06, "loss": 1.0294, "step": 11283 }, { "epoch": 90.272, "grad_norm": 31.553882598876953, "learning_rate": 5.453333333333334e-06, "loss": 0.8185, "step": 11284 }, { "epoch": 90.28, "grad_norm": 17.811609268188477, "learning_rate": 5.44888888888889e-06, "loss": 0.4137, "step": 11285 }, { "epoch": 90.288, "grad_norm": 18.01226806640625, "learning_rate": 5.444444444444445e-06, "loss": 0.7051, "step": 11286 }, { "epoch": 90.296, "grad_norm": 17.692962646484375, "learning_rate": 5.44e-06, "loss": 0.9634, "step": 11287 }, { "epoch": 90.304, "grad_norm": 14.650715827941895, "learning_rate": 5.435555555555555e-06, "loss": 0.6755, "step": 11288 }, { "epoch": 90.312, "grad_norm": 38.73143768310547, "learning_rate": 5.431111111111111e-06, "loss": 1.5558, "step": 11289 }, { "epoch": 90.32, "grad_norm": 35.514774322509766, "learning_rate": 5.426666666666667e-06, "loss": 1.3383, "step": 11290 }, { "epoch": 90.328, "grad_norm": 
17.904727935791016, "learning_rate": 5.422222222222222e-06, "loss": 0.6869, "step": 11291 }, { "epoch": 90.336, "grad_norm": 27.222349166870117, "learning_rate": 5.417777777777778e-06, "loss": 0.5984, "step": 11292 }, { "epoch": 90.344, "grad_norm": 97.62354278564453, "learning_rate": 5.413333333333334e-06, "loss": 0.8214, "step": 11293 }, { "epoch": 90.352, "grad_norm": 25.657331466674805, "learning_rate": 5.4088888888888894e-06, "loss": 0.5701, "step": 11294 }, { "epoch": 90.36, "grad_norm": 26.999250411987305, "learning_rate": 5.404444444444444e-06, "loss": 1.083, "step": 11295 }, { "epoch": 90.368, "grad_norm": 25.993083953857422, "learning_rate": 5.4e-06, "loss": 0.9788, "step": 11296 }, { "epoch": 90.376, "grad_norm": 34.02251434326172, "learning_rate": 5.395555555555556e-06, "loss": 0.7159, "step": 11297 }, { "epoch": 90.384, "grad_norm": 12.700831413269043, "learning_rate": 5.391111111111111e-06, "loss": 0.7234, "step": 11298 }, { "epoch": 90.392, "grad_norm": 39.29998779296875, "learning_rate": 5.386666666666667e-06, "loss": 0.6283, "step": 11299 }, { "epoch": 90.4, "grad_norm": 17.232500076293945, "learning_rate": 5.382222222222223e-06, "loss": 0.984, "step": 11300 }, { "epoch": 90.408, "grad_norm": 15.641718864440918, "learning_rate": 5.3777777777777784e-06, "loss": 0.6982, "step": 11301 }, { "epoch": 90.416, "grad_norm": 31.73773765563965, "learning_rate": 5.373333333333333e-06, "loss": 0.6656, "step": 11302 }, { "epoch": 90.424, "grad_norm": 40.024658203125, "learning_rate": 5.368888888888889e-06, "loss": 1.2651, "step": 11303 }, { "epoch": 90.432, "grad_norm": 22.937036514282227, "learning_rate": 5.364444444444445e-06, "loss": 0.8854, "step": 11304 }, { "epoch": 90.44, "grad_norm": 78.23667907714844, "learning_rate": 5.36e-06, "loss": 0.808, "step": 11305 }, { "epoch": 90.448, "grad_norm": 21.220945358276367, "learning_rate": 5.355555555555556e-06, "loss": 0.9116, "step": 11306 }, { "epoch": 90.456, "grad_norm": 44.80083465576172, "learning_rate": 
5.351111111111112e-06, "loss": 1.2583, "step": 11307 }, { "epoch": 90.464, "grad_norm": 42.76448440551758, "learning_rate": 5.3466666666666674e-06, "loss": 1.0918, "step": 11308 }, { "epoch": 90.472, "grad_norm": 47.592735290527344, "learning_rate": 5.342222222222223e-06, "loss": 0.8916, "step": 11309 }, { "epoch": 90.48, "grad_norm": 19.07038116455078, "learning_rate": 5.337777777777778e-06, "loss": 0.7419, "step": 11310 }, { "epoch": 90.488, "grad_norm": 17.33692741394043, "learning_rate": 5.333333333333334e-06, "loss": 0.7199, "step": 11311 }, { "epoch": 90.496, "grad_norm": 30.51642417907715, "learning_rate": 5.3288888888888885e-06, "loss": 0.8013, "step": 11312 }, { "epoch": 90.504, "grad_norm": 22.172178268432617, "learning_rate": 5.324444444444444e-06, "loss": 0.7363, "step": 11313 }, { "epoch": 90.512, "grad_norm": 34.674964904785156, "learning_rate": 5.32e-06, "loss": 0.7833, "step": 11314 }, { "epoch": 90.52, "grad_norm": 19.1398983001709, "learning_rate": 5.315555555555556e-06, "loss": 0.7517, "step": 11315 }, { "epoch": 90.528, "grad_norm": 40.39659881591797, "learning_rate": 5.311111111111111e-06, "loss": 0.7182, "step": 11316 }, { "epoch": 90.536, "grad_norm": 18.716672897338867, "learning_rate": 5.306666666666667e-06, "loss": 0.7623, "step": 11317 }, { "epoch": 90.544, "grad_norm": 30.870481491088867, "learning_rate": 5.302222222222223e-06, "loss": 0.736, "step": 11318 }, { "epoch": 90.552, "grad_norm": 16.164657592773438, "learning_rate": 5.2977777777777775e-06, "loss": 0.8021, "step": 11319 }, { "epoch": 90.56, "grad_norm": 15.686073303222656, "learning_rate": 5.293333333333333e-06, "loss": 0.5974, "step": 11320 }, { "epoch": 90.568, "grad_norm": 20.70954704284668, "learning_rate": 5.288888888888889e-06, "loss": 0.5003, "step": 11321 }, { "epoch": 90.576, "grad_norm": 256.09747314453125, "learning_rate": 5.284444444444445e-06, "loss": 2.3031, "step": 11322 }, { "epoch": 90.584, "grad_norm": 17.112163543701172, "learning_rate": 5.28e-06, "loss": 
0.6877, "step": 11323 }, { "epoch": 90.592, "grad_norm": 26.72458839416504, "learning_rate": 5.275555555555556e-06, "loss": 0.5723, "step": 11324 }, { "epoch": 90.6, "grad_norm": 35.56977081298828, "learning_rate": 5.271111111111112e-06, "loss": 1.0041, "step": 11325 }, { "epoch": 90.608, "grad_norm": 20.87982177734375, "learning_rate": 5.266666666666667e-06, "loss": 0.792, "step": 11326 }, { "epoch": 90.616, "grad_norm": 26.207717895507812, "learning_rate": 5.262222222222222e-06, "loss": 0.9877, "step": 11327 }, { "epoch": 90.624, "grad_norm": 22.79918098449707, "learning_rate": 5.257777777777778e-06, "loss": 0.9289, "step": 11328 }, { "epoch": 90.632, "grad_norm": 19.943578720092773, "learning_rate": 5.2533333333333336e-06, "loss": 0.4197, "step": 11329 }, { "epoch": 90.64, "grad_norm": 35.034664154052734, "learning_rate": 5.248888888888889e-06, "loss": 1.5012, "step": 11330 }, { "epoch": 90.648, "grad_norm": 25.82729148864746, "learning_rate": 5.244444444444445e-06, "loss": 0.9088, "step": 11331 }, { "epoch": 90.656, "grad_norm": 24.209518432617188, "learning_rate": 5.240000000000001e-06, "loss": 0.6882, "step": 11332 }, { "epoch": 90.664, "grad_norm": 130.59027099609375, "learning_rate": 5.235555555555556e-06, "loss": 0.4866, "step": 11333 }, { "epoch": 90.672, "grad_norm": 30.367937088012695, "learning_rate": 5.231111111111112e-06, "loss": 0.6224, "step": 11334 }, { "epoch": 90.68, "grad_norm": 21.818693161010742, "learning_rate": 5.226666666666667e-06, "loss": 1.3102, "step": 11335 }, { "epoch": 90.688, "grad_norm": 105.11640930175781, "learning_rate": 5.2222222222222226e-06, "loss": 0.5869, "step": 11336 }, { "epoch": 90.696, "grad_norm": 14.595504760742188, "learning_rate": 5.217777777777777e-06, "loss": 0.7212, "step": 11337 }, { "epoch": 90.704, "grad_norm": 33.726402282714844, "learning_rate": 5.213333333333333e-06, "loss": 1.5953, "step": 11338 }, { "epoch": 90.712, "grad_norm": 18.329164505004883, "learning_rate": 5.208888888888889e-06, "loss": 0.8007, 
"step": 11339 }, { "epoch": 90.72, "grad_norm": 73.63622283935547, "learning_rate": 5.2044444444444445e-06, "loss": 0.9632, "step": 11340 }, { "epoch": 90.728, "grad_norm": 32.01656723022461, "learning_rate": 5.2e-06, "loss": 0.6457, "step": 11341 }, { "epoch": 90.736, "grad_norm": 60.476722717285156, "learning_rate": 5.195555555555556e-06, "loss": 0.6995, "step": 11342 }, { "epoch": 90.744, "grad_norm": 30.422115325927734, "learning_rate": 5.1911111111111116e-06, "loss": 0.8517, "step": 11343 }, { "epoch": 90.752, "grad_norm": 13.978432655334473, "learning_rate": 5.186666666666666e-06, "loss": 0.9077, "step": 11344 }, { "epoch": 90.76, "grad_norm": 15.44845199584961, "learning_rate": 5.182222222222222e-06, "loss": 0.831, "step": 11345 }, { "epoch": 90.768, "grad_norm": 19.08966064453125, "learning_rate": 5.177777777777778e-06, "loss": 0.966, "step": 11346 }, { "epoch": 90.776, "grad_norm": 11.943199157714844, "learning_rate": 5.1733333333333335e-06, "loss": 0.967, "step": 11347 }, { "epoch": 90.784, "grad_norm": 14.032170295715332, "learning_rate": 5.168888888888889e-06, "loss": 0.7411, "step": 11348 }, { "epoch": 90.792, "grad_norm": 22.338186264038086, "learning_rate": 5.164444444444445e-06, "loss": 1.9998, "step": 11349 }, { "epoch": 90.8, "grad_norm": 33.207035064697266, "learning_rate": 5.1600000000000006e-06, "loss": 1.1208, "step": 11350 }, { "epoch": 90.808, "grad_norm": 66.41706085205078, "learning_rate": 5.155555555555555e-06, "loss": 0.5944, "step": 11351 }, { "epoch": 90.816, "grad_norm": 15.900662422180176, "learning_rate": 5.151111111111111e-06, "loss": 0.994, "step": 11352 }, { "epoch": 90.824, "grad_norm": 28.061538696289062, "learning_rate": 5.146666666666667e-06, "loss": 0.7678, "step": 11353 }, { "epoch": 90.832, "grad_norm": 19.051136016845703, "learning_rate": 5.1422222222222225e-06, "loss": 0.7195, "step": 11354 }, { "epoch": 90.84, "grad_norm": 15.118008613586426, "learning_rate": 5.137777777777778e-06, "loss": 0.6364, "step": 11355 }, { 
"epoch": 90.848, "grad_norm": 43.93933868408203, "learning_rate": 5.133333333333334e-06, "loss": 0.7523, "step": 11356 }, { "epoch": 90.856, "grad_norm": 15.382637023925781, "learning_rate": 5.1288888888888896e-06, "loss": 0.922, "step": 11357 }, { "epoch": 90.864, "grad_norm": 28.04192352294922, "learning_rate": 5.124444444444445e-06, "loss": 0.8738, "step": 11358 }, { "epoch": 90.872, "grad_norm": 23.47163963317871, "learning_rate": 5.12e-06, "loss": 0.6377, "step": 11359 }, { "epoch": 90.88, "grad_norm": 25.641490936279297, "learning_rate": 5.115555555555556e-06, "loss": 0.7086, "step": 11360 }, { "epoch": 90.888, "grad_norm": 71.25901794433594, "learning_rate": 5.1111111111111115e-06, "loss": 0.8471, "step": 11361 }, { "epoch": 90.896, "grad_norm": 35.30994415283203, "learning_rate": 5.106666666666667e-06, "loss": 1.5859, "step": 11362 }, { "epoch": 90.904, "grad_norm": 23.566858291625977, "learning_rate": 5.102222222222222e-06, "loss": 0.7172, "step": 11363 }, { "epoch": 90.912, "grad_norm": 114.80644226074219, "learning_rate": 5.097777777777778e-06, "loss": 0.7183, "step": 11364 }, { "epoch": 90.92, "grad_norm": 18.202655792236328, "learning_rate": 5.093333333333333e-06, "loss": 0.6626, "step": 11365 }, { "epoch": 90.928, "grad_norm": 17.32012367248535, "learning_rate": 5.088888888888889e-06, "loss": 0.6756, "step": 11366 }, { "epoch": 90.936, "grad_norm": 29.833913803100586, "learning_rate": 5.084444444444445e-06, "loss": 0.8785, "step": 11367 }, { "epoch": 90.944, "grad_norm": 15.177255630493164, "learning_rate": 5.08e-06, "loss": 0.5284, "step": 11368 }, { "epoch": 90.952, "grad_norm": 27.57533836364746, "learning_rate": 5.075555555555555e-06, "loss": 0.8079, "step": 11369 }, { "epoch": 90.96, "grad_norm": 20.544334411621094, "learning_rate": 5.071111111111111e-06, "loss": 0.6342, "step": 11370 }, { "epoch": 90.968, "grad_norm": 19.374988555908203, "learning_rate": 5.066666666666667e-06, "loss": 0.6261, "step": 11371 }, { "epoch": 90.976, "grad_norm": 
26.48375129699707, "learning_rate": 5.062222222222222e-06, "loss": 0.5046, "step": 11372 }, { "epoch": 90.984, "grad_norm": 21.713136672973633, "learning_rate": 5.057777777777778e-06, "loss": 0.907, "step": 11373 }, { "epoch": 90.992, "grad_norm": 19.931140899658203, "learning_rate": 5.053333333333334e-06, "loss": 0.5952, "step": 11374 }, { "epoch": 91.0, "grad_norm": 20.677091598510742, "learning_rate": 5.0488888888888895e-06, "loss": 0.6612, "step": 11375 }, { "epoch": 91.0, "eval_loss": 0.9138544201850891, "eval_map": 0.4858, "eval_map_50": 0.8333, "eval_map_75": 0.4978, "eval_map_Coverall": 0.6888, "eval_map_Face_Shield": 0.6141, "eval_map_Gloves": 0.3743, "eval_map_Goggles": 0.2969, "eval_map_Mask": 0.4545, "eval_map_large": 0.6889, "eval_map_medium": 0.3494, "eval_map_small": 0.4247, "eval_mar_1": 0.3567, "eval_mar_10": 0.6033, "eval_mar_100": 0.6125, "eval_mar_100_Coverall": 0.7756, "eval_mar_100_Face_Shield": 0.7294, "eval_mar_100_Gloves": 0.4984, "eval_mar_100_Goggles": 0.5188, "eval_mar_100_Mask": 0.5404, "eval_mar_large": 0.8026, "eval_mar_medium": 0.5003, "eval_mar_small": 0.4824, "eval_runtime": 0.9368, "eval_samples_per_second": 30.955, "eval_steps_per_second": 2.135, "step": 11375 }, { "epoch": 91.008, "grad_norm": 22.991622924804688, "learning_rate": 5.044444444444444e-06, "loss": 1.0004, "step": 11376 }, { "epoch": 91.016, "grad_norm": 17.164962768554688, "learning_rate": 5.04e-06, "loss": 0.6892, "step": 11377 }, { "epoch": 91.024, "grad_norm": 49.3266716003418, "learning_rate": 5.035555555555556e-06, "loss": 0.8702, "step": 11378 }, { "epoch": 91.032, "grad_norm": 20.289798736572266, "learning_rate": 5.031111111111111e-06, "loss": 0.6526, "step": 11379 }, { "epoch": 91.04, "grad_norm": 29.314435958862305, "learning_rate": 5.026666666666667e-06, "loss": 0.6483, "step": 11380 }, { "epoch": 91.048, "grad_norm": 37.24995803833008, "learning_rate": 5.022222222222223e-06, "loss": 1.0157, "step": 11381 }, { "epoch": 91.056, "grad_norm": 
19.201759338378906, "learning_rate": 5.0177777777777785e-06, "loss": 0.9141, "step": 11382 }, { "epoch": 91.064, "grad_norm": 32.802730560302734, "learning_rate": 5.013333333333334e-06, "loss": 0.6217, "step": 11383 }, { "epoch": 91.072, "grad_norm": 42.75737380981445, "learning_rate": 5.008888888888889e-06, "loss": 0.9785, "step": 11384 }, { "epoch": 91.08, "grad_norm": 15.67501163482666, "learning_rate": 5.004444444444445e-06, "loss": 0.8389, "step": 11385 }, { "epoch": 91.088, "grad_norm": 37.141056060791016, "learning_rate": 5e-06, "loss": 0.5386, "step": 11386 }, { "epoch": 91.096, "grad_norm": 20.797101974487305, "learning_rate": 4.995555555555556e-06, "loss": 0.6918, "step": 11387 }, { "epoch": 91.104, "grad_norm": 16.960731506347656, "learning_rate": 4.991111111111112e-06, "loss": 1.9722, "step": 11388 }, { "epoch": 91.112, "grad_norm": 32.32819366455078, "learning_rate": 4.986666666666667e-06, "loss": 0.7313, "step": 11389 }, { "epoch": 91.12, "grad_norm": 31.731081008911133, "learning_rate": 4.982222222222222e-06, "loss": 1.8111, "step": 11390 }, { "epoch": 91.128, "grad_norm": 25.365224838256836, "learning_rate": 4.977777777777778e-06, "loss": 0.8017, "step": 11391 }, { "epoch": 91.136, "grad_norm": 25.087430953979492, "learning_rate": 4.973333333333334e-06, "loss": 1.0539, "step": 11392 }, { "epoch": 91.144, "grad_norm": 15.135586738586426, "learning_rate": 4.9688888888888886e-06, "loss": 0.792, "step": 11393 }, { "epoch": 91.152, "grad_norm": 12.904868125915527, "learning_rate": 4.964444444444444e-06, "loss": 0.7373, "step": 11394 }, { "epoch": 91.16, "grad_norm": 12.953569412231445, "learning_rate": 4.96e-06, "loss": 0.73, "step": 11395 }, { "epoch": 91.168, "grad_norm": 15.311824798583984, "learning_rate": 4.955555555555556e-06, "loss": 0.5554, "step": 11396 }, { "epoch": 91.176, "grad_norm": 14.766283988952637, "learning_rate": 4.951111111111111e-06, "loss": 0.6258, "step": 11397 }, { "epoch": 91.184, "grad_norm": 27.29790687561035, "learning_rate": 
4.946666666666667e-06, "loss": 1.0838, "step": 11398 }, { "epoch": 91.192, "grad_norm": 20.555444717407227, "learning_rate": 4.942222222222223e-06, "loss": 0.9212, "step": 11399 }, { "epoch": 91.2, "grad_norm": 30.924686431884766, "learning_rate": 4.9377777777777776e-06, "loss": 0.8762, "step": 11400 }, { "epoch": 91.208, "grad_norm": 18.870059967041016, "learning_rate": 4.933333333333333e-06, "loss": 0.8414, "step": 11401 }, { "epoch": 91.216, "grad_norm": 21.696035385131836, "learning_rate": 4.928888888888889e-06, "loss": 1.9097, "step": 11402 }, { "epoch": 91.224, "grad_norm": 21.11311912536621, "learning_rate": 4.924444444444445e-06, "loss": 1.393, "step": 11403 }, { "epoch": 91.232, "grad_norm": 17.19361686706543, "learning_rate": 4.92e-06, "loss": 0.8773, "step": 11404 }, { "epoch": 91.24, "grad_norm": 25.38372230529785, "learning_rate": 4.915555555555556e-06, "loss": 1.0097, "step": 11405 }, { "epoch": 91.248, "grad_norm": 31.003448486328125, "learning_rate": 4.911111111111112e-06, "loss": 0.7289, "step": 11406 }, { "epoch": 91.256, "grad_norm": 43.70675277709961, "learning_rate": 4.906666666666667e-06, "loss": 0.8447, "step": 11407 }, { "epoch": 91.264, "grad_norm": 104.78668212890625, "learning_rate": 4.902222222222222e-06, "loss": 0.7057, "step": 11408 }, { "epoch": 91.272, "grad_norm": 27.245349884033203, "learning_rate": 4.897777777777778e-06, "loss": 0.7991, "step": 11409 }, { "epoch": 91.28, "grad_norm": 28.36664390563965, "learning_rate": 4.893333333333334e-06, "loss": 0.6558, "step": 11410 }, { "epoch": 91.288, "grad_norm": 19.820024490356445, "learning_rate": 4.888888888888889e-06, "loss": 0.7877, "step": 11411 }, { "epoch": 91.296, "grad_norm": 25.950611114501953, "learning_rate": 4.884444444444445e-06, "loss": 0.5516, "step": 11412 }, { "epoch": 91.304, "grad_norm": 28.532739639282227, "learning_rate": 4.880000000000001e-06, "loss": 0.7147, "step": 11413 }, { "epoch": 91.312, "grad_norm": 14.020471572875977, "learning_rate": 
4.875555555555556e-06, "loss": 0.985, "step": 11414 }, { "epoch": 91.32, "grad_norm": 67.83446502685547, "learning_rate": 4.871111111111111e-06, "loss": 0.7657, "step": 11415 }, { "epoch": 91.328, "grad_norm": 29.732006072998047, "learning_rate": 4.866666666666667e-06, "loss": 0.6716, "step": 11416 }, { "epoch": 91.336, "grad_norm": 25.811555862426758, "learning_rate": 4.862222222222222e-06, "loss": 0.8503, "step": 11417 }, { "epoch": 91.344, "grad_norm": 55.453575134277344, "learning_rate": 4.8577777777777775e-06, "loss": 0.7783, "step": 11418 }, { "epoch": 91.352, "grad_norm": 18.073354721069336, "learning_rate": 4.853333333333333e-06, "loss": 0.6112, "step": 11419 }, { "epoch": 91.36, "grad_norm": 38.10764694213867, "learning_rate": 4.848888888888889e-06, "loss": 1.0025, "step": 11420 }, { "epoch": 91.368, "grad_norm": 28.11591911315918, "learning_rate": 4.8444444444444446e-06, "loss": 0.7804, "step": 11421 }, { "epoch": 91.376, "grad_norm": 31.94392204284668, "learning_rate": 4.84e-06, "loss": 0.7983, "step": 11422 }, { "epoch": 91.384, "grad_norm": 20.654783248901367, "learning_rate": 4.835555555555556e-06, "loss": 0.6393, "step": 11423 }, { "epoch": 91.392, "grad_norm": 15.745625495910645, "learning_rate": 4.831111111111112e-06, "loss": 1.1065, "step": 11424 }, { "epoch": 91.4, "grad_norm": 17.08847427368164, "learning_rate": 4.8266666666666665e-06, "loss": 0.6907, "step": 11425 }, { "epoch": 91.408, "grad_norm": 19.154359817504883, "learning_rate": 4.822222222222222e-06, "loss": 1.0275, "step": 11426 }, { "epoch": 91.416, "grad_norm": 16.794958114624023, "learning_rate": 4.817777777777778e-06, "loss": 0.4957, "step": 11427 }, { "epoch": 91.424, "grad_norm": 111.43287658691406, "learning_rate": 4.8133333333333336e-06, "loss": 2.0387, "step": 11428 }, { "epoch": 91.432, "grad_norm": 14.981023788452148, "learning_rate": 4.808888888888889e-06, "loss": 0.6615, "step": 11429 }, { "epoch": 91.44, "grad_norm": 40.77240753173828, "learning_rate": 
4.804444444444445e-06, "loss": 0.7955, "step": 11430 }, { "epoch": 91.448, "grad_norm": 115.13773345947266, "learning_rate": 4.800000000000001e-06, "loss": 0.6477, "step": 11431 }, { "epoch": 91.456, "grad_norm": 18.890682220458984, "learning_rate": 4.795555555555556e-06, "loss": 1.0461, "step": 11432 }, { "epoch": 91.464, "grad_norm": 23.44930076599121, "learning_rate": 4.791111111111111e-06, "loss": 0.7677, "step": 11433 }, { "epoch": 91.472, "grad_norm": 18.252260208129883, "learning_rate": 4.786666666666667e-06, "loss": 0.741, "step": 11434 }, { "epoch": 91.48, "grad_norm": 14.61309814453125, "learning_rate": 4.7822222222222226e-06, "loss": 0.693, "step": 11435 }, { "epoch": 91.488, "grad_norm": 22.102088928222656, "learning_rate": 4.777777777777778e-06, "loss": 1.1258, "step": 11436 }, { "epoch": 91.496, "grad_norm": 26.40462875366211, "learning_rate": 4.773333333333334e-06, "loss": 0.9408, "step": 11437 }, { "epoch": 91.504, "grad_norm": 30.350492477416992, "learning_rate": 4.76888888888889e-06, "loss": 0.5149, "step": 11438 }, { "epoch": 91.512, "grad_norm": 20.493728637695312, "learning_rate": 4.764444444444445e-06, "loss": 0.5768, "step": 11439 }, { "epoch": 91.52, "grad_norm": 22.014726638793945, "learning_rate": 4.76e-06, "loss": 0.7355, "step": 11440 }, { "epoch": 91.528, "grad_norm": 29.368268966674805, "learning_rate": 4.755555555555556e-06, "loss": 0.7939, "step": 11441 }, { "epoch": 91.536, "grad_norm": 16.244659423828125, "learning_rate": 4.751111111111111e-06, "loss": 1.0337, "step": 11442 }, { "epoch": 91.544, "grad_norm": 16.594562530517578, "learning_rate": 4.746666666666666e-06, "loss": 0.5234, "step": 11443 }, { "epoch": 91.552, "grad_norm": 20.621431350708008, "learning_rate": 4.742222222222222e-06, "loss": 1.119, "step": 11444 }, { "epoch": 91.56, "grad_norm": 25.867084503173828, "learning_rate": 4.737777777777778e-06, "loss": 0.7761, "step": 11445 }, { "epoch": 91.568, "grad_norm": 41.69773864746094, "learning_rate": 
4.7333333333333335e-06, "loss": 0.5824, "step": 11446 }, { "epoch": 91.576, "grad_norm": 27.430692672729492, "learning_rate": 4.728888888888889e-06, "loss": 0.9807, "step": 11447 }, { "epoch": 91.584, "grad_norm": 64.26445007324219, "learning_rate": 4.724444444444445e-06, "loss": 1.265, "step": 11448 }, { "epoch": 91.592, "grad_norm": 18.78572654724121, "learning_rate": 4.72e-06, "loss": 0.812, "step": 11449 }, { "epoch": 91.6, "grad_norm": 15.352534294128418, "learning_rate": 4.715555555555555e-06, "loss": 0.7126, "step": 11450 }, { "epoch": 91.608, "grad_norm": 24.044845581054688, "learning_rate": 4.711111111111111e-06, "loss": 0.8516, "step": 11451 }, { "epoch": 91.616, "grad_norm": 12.248961448669434, "learning_rate": 4.706666666666667e-06, "loss": 0.7851, "step": 11452 }, { "epoch": 91.624, "grad_norm": 44.69369125366211, "learning_rate": 4.7022222222222225e-06, "loss": 0.8474, "step": 11453 }, { "epoch": 91.632, "grad_norm": 17.487194061279297, "learning_rate": 4.697777777777778e-06, "loss": 0.5885, "step": 11454 }, { "epoch": 91.64, "grad_norm": 15.049991607666016, "learning_rate": 4.693333333333334e-06, "loss": 0.7272, "step": 11455 }, { "epoch": 91.648, "grad_norm": 41.00471878051758, "learning_rate": 4.6888888888888895e-06, "loss": 0.905, "step": 11456 }, { "epoch": 91.656, "grad_norm": 20.357709884643555, "learning_rate": 4.684444444444444e-06, "loss": 0.5412, "step": 11457 }, { "epoch": 91.664, "grad_norm": 20.143009185791016, "learning_rate": 4.68e-06, "loss": 0.3912, "step": 11458 }, { "epoch": 91.672, "grad_norm": 30.96844482421875, "learning_rate": 4.675555555555556e-06, "loss": 1.2857, "step": 11459 }, { "epoch": 91.68, "grad_norm": 29.677906036376953, "learning_rate": 4.6711111111111115e-06, "loss": 0.576, "step": 11460 }, { "epoch": 91.688, "grad_norm": 24.272878646850586, "learning_rate": 4.666666666666667e-06, "loss": 0.5582, "step": 11461 }, { "epoch": 91.696, "grad_norm": 22.0208683013916, "learning_rate": 4.662222222222223e-06, "loss": 
0.7394, "step": 11462 }, { "epoch": 91.704, "grad_norm": 14.211851119995117, "learning_rate": 4.6577777777777785e-06, "loss": 0.8017, "step": 11463 }, { "epoch": 91.712, "grad_norm": 94.53235626220703, "learning_rate": 4.653333333333334e-06, "loss": 1.453, "step": 11464 }, { "epoch": 91.72, "grad_norm": 49.82110595703125, "learning_rate": 4.648888888888889e-06, "loss": 0.6192, "step": 11465 }, { "epoch": 91.728, "grad_norm": 47.534908294677734, "learning_rate": 4.644444444444444e-06, "loss": 0.6718, "step": 11466 }, { "epoch": 91.736, "grad_norm": 42.27075958251953, "learning_rate": 4.64e-06, "loss": 0.6399, "step": 11467 }, { "epoch": 91.744, "grad_norm": 17.930923461914062, "learning_rate": 4.635555555555555e-06, "loss": 0.5687, "step": 11468 }, { "epoch": 91.752, "grad_norm": 21.40251350402832, "learning_rate": 4.631111111111111e-06, "loss": 1.0337, "step": 11469 }, { "epoch": 91.76, "grad_norm": 114.4234619140625, "learning_rate": 4.626666666666667e-06, "loss": 0.6153, "step": 11470 }, { "epoch": 91.768, "grad_norm": 22.099864959716797, "learning_rate": 4.622222222222222e-06, "loss": 0.7604, "step": 11471 }, { "epoch": 91.776, "grad_norm": 34.20452880859375, "learning_rate": 4.617777777777778e-06, "loss": 0.7762, "step": 11472 }, { "epoch": 91.784, "grad_norm": 118.36526489257812, "learning_rate": 4.613333333333334e-06, "loss": 2.4809, "step": 11473 }, { "epoch": 91.792, "grad_norm": 13.431716918945312, "learning_rate": 4.608888888888889e-06, "loss": 0.9593, "step": 11474 }, { "epoch": 91.8, "grad_norm": 9.605631828308105, "learning_rate": 4.604444444444444e-06, "loss": 0.626, "step": 11475 }, { "epoch": 91.808, "grad_norm": 111.0583724975586, "learning_rate": 4.6e-06, "loss": 0.8707, "step": 11476 }, { "epoch": 91.816, "grad_norm": 19.42575454711914, "learning_rate": 4.595555555555556e-06, "loss": 0.8376, "step": 11477 }, { "epoch": 91.824, "grad_norm": 18.16069221496582, "learning_rate": 4.591111111111111e-06, "loss": 0.8616, "step": 11478 }, { "epoch": 
91.832, "grad_norm": 26.330171585083008, "learning_rate": 4.586666666666667e-06, "loss": 1.0061, "step": 11479 }, { "epoch": 91.84, "grad_norm": 20.2015380859375, "learning_rate": 4.582222222222223e-06, "loss": 0.7421, "step": 11480 }, { "epoch": 91.848, "grad_norm": 23.39946174621582, "learning_rate": 4.5777777777777785e-06, "loss": 0.9454, "step": 11481 }, { "epoch": 91.856, "grad_norm": 28.477163314819336, "learning_rate": 4.573333333333333e-06, "loss": 0.801, "step": 11482 }, { "epoch": 91.864, "grad_norm": 23.739423751831055, "learning_rate": 4.568888888888889e-06, "loss": 0.8465, "step": 11483 }, { "epoch": 91.872, "grad_norm": 13.386033058166504, "learning_rate": 4.564444444444445e-06, "loss": 1.2484, "step": 11484 }, { "epoch": 91.88, "grad_norm": 20.299388885498047, "learning_rate": 4.56e-06, "loss": 0.536, "step": 11485 }, { "epoch": 91.888, "grad_norm": 23.15911865234375, "learning_rate": 4.555555555555556e-06, "loss": 0.8764, "step": 11486 }, { "epoch": 91.896, "grad_norm": 17.157154083251953, "learning_rate": 4.551111111111112e-06, "loss": 0.6841, "step": 11487 }, { "epoch": 91.904, "grad_norm": 42.27806854248047, "learning_rate": 4.5466666666666675e-06, "loss": 0.8858, "step": 11488 }, { "epoch": 91.912, "grad_norm": 43.48796081542969, "learning_rate": 4.542222222222222e-06, "loss": 1.3636, "step": 11489 }, { "epoch": 91.92, "grad_norm": 36.69265365600586, "learning_rate": 4.537777777777778e-06, "loss": 0.671, "step": 11490 }, { "epoch": 91.928, "grad_norm": 12.41987419128418, "learning_rate": 4.533333333333334e-06, "loss": 0.7267, "step": 11491 }, { "epoch": 91.936, "grad_norm": 145.74484252929688, "learning_rate": 4.5288888888888885e-06, "loss": 0.6764, "step": 11492 }, { "epoch": 91.944, "grad_norm": 16.115575790405273, "learning_rate": 4.524444444444444e-06, "loss": 0.5666, "step": 11493 }, { "epoch": 91.952, "grad_norm": 49.996456146240234, "learning_rate": 4.52e-06, "loss": 0.8329, "step": 11494 }, { "epoch": 91.96, "grad_norm": 
41.62706756591797, "learning_rate": 4.515555555555556e-06, "loss": 0.6736, "step": 11495 }, { "epoch": 91.968, "grad_norm": 17.11890983581543, "learning_rate": 4.511111111111111e-06, "loss": 0.7038, "step": 11496 }, { "epoch": 91.976, "grad_norm": 30.54080581665039, "learning_rate": 4.506666666666667e-06, "loss": 0.9264, "step": 11497 }, { "epoch": 91.984, "grad_norm": 19.426252365112305, "learning_rate": 4.502222222222222e-06, "loss": 0.7429, "step": 11498 }, { "epoch": 91.992, "grad_norm": 16.573959350585938, "learning_rate": 4.4977777777777775e-06, "loss": 0.9898, "step": 11499 }, { "epoch": 92.0, "grad_norm": 20.82301902770996, "learning_rate": 4.493333333333333e-06, "loss": 0.6297, "step": 11500 }, { "epoch": 92.0, "eval_loss": 0.9200900793075562, "eval_map": 0.4896, "eval_map_50": 0.8291, "eval_map_75": 0.4858, "eval_map_Coverall": 0.7101, "eval_map_Face_Shield": 0.5977, "eval_map_Gloves": 0.3853, "eval_map_Goggles": 0.3112, "eval_map_Mask": 0.4436, "eval_map_large": 0.6759, "eval_map_medium": 0.3669, "eval_map_small": 0.3972, "eval_mar_1": 0.3613, "eval_mar_10": 0.6014, "eval_mar_100": 0.6118, "eval_mar_100_Coverall": 0.7844, "eval_mar_100_Face_Shield": 0.7235, "eval_mar_100_Gloves": 0.5033, "eval_mar_100_Goggles": 0.5188, "eval_mar_100_Mask": 0.5288, "eval_mar_large": 0.7904, "eval_mar_medium": 0.5182, "eval_mar_small": 0.4694, "eval_runtime": 0.9114, "eval_samples_per_second": 31.819, "eval_steps_per_second": 2.194, "step": 11500 }, { "epoch": 92.008, "grad_norm": 29.764482498168945, "learning_rate": 4.488888888888889e-06, "loss": 0.8517, "step": 11501 }, { "epoch": 92.016, "grad_norm": 35.83708953857422, "learning_rate": 4.484444444444445e-06, "loss": 0.7824, "step": 11502 }, { "epoch": 92.024, "grad_norm": 25.400630950927734, "learning_rate": 4.48e-06, "loss": 0.8892, "step": 11503 }, { "epoch": 92.032, "grad_norm": 28.88829231262207, "learning_rate": 4.475555555555556e-06, "loss": 0.9861, "step": 11504 }, { "epoch": 92.04, "grad_norm": 
16.11166000366211, "learning_rate": 4.471111111111112e-06, "loss": 0.7063, "step": 11505 }, { "epoch": 92.048, "grad_norm": 21.886947631835938, "learning_rate": 4.4666666666666665e-06, "loss": 0.6787, "step": 11506 }, { "epoch": 92.056, "grad_norm": 15.615707397460938, "learning_rate": 4.462222222222222e-06, "loss": 0.8053, "step": 11507 }, { "epoch": 92.064, "grad_norm": 15.279265403747559, "learning_rate": 4.457777777777778e-06, "loss": 0.7944, "step": 11508 }, { "epoch": 92.072, "grad_norm": 29.667537689208984, "learning_rate": 4.453333333333334e-06, "loss": 0.6391, "step": 11509 }, { "epoch": 92.08, "grad_norm": 35.667396545410156, "learning_rate": 4.448888888888889e-06, "loss": 0.9786, "step": 11510 }, { "epoch": 92.088, "grad_norm": 35.851226806640625, "learning_rate": 4.444444444444445e-06, "loss": 1.8274, "step": 11511 }, { "epoch": 92.096, "grad_norm": 28.368175506591797, "learning_rate": 4.440000000000001e-06, "loss": 0.5809, "step": 11512 }, { "epoch": 92.104, "grad_norm": 23.906232833862305, "learning_rate": 4.435555555555556e-06, "loss": 0.7655, "step": 11513 }, { "epoch": 92.112, "grad_norm": 19.284664154052734, "learning_rate": 4.431111111111111e-06, "loss": 1.0392, "step": 11514 }, { "epoch": 92.12, "grad_norm": 20.748210906982422, "learning_rate": 4.426666666666667e-06, "loss": 0.6136, "step": 11515 }, { "epoch": 92.128, "grad_norm": 19.695117950439453, "learning_rate": 4.422222222222223e-06, "loss": 1.0628, "step": 11516 }, { "epoch": 92.136, "grad_norm": 18.934951782226562, "learning_rate": 4.417777777777778e-06, "loss": 0.9273, "step": 11517 }, { "epoch": 92.144, "grad_norm": 26.302047729492188, "learning_rate": 4.413333333333333e-06, "loss": 1.3123, "step": 11518 }, { "epoch": 92.152, "grad_norm": 23.70608901977539, "learning_rate": 4.408888888888889e-06, "loss": 0.8158, "step": 11519 }, { "epoch": 92.16, "grad_norm": 35.817298889160156, "learning_rate": 4.4044444444444445e-06, "loss": 1.0628, "step": 11520 }, { "epoch": 92.168, "grad_norm": 
24.91607093811035, "learning_rate": 4.4e-06, "loss": 0.8911, "step": 11521 }, { "epoch": 92.176, "grad_norm": 38.698204040527344, "learning_rate": 4.395555555555556e-06, "loss": 0.8221, "step": 11522 }, { "epoch": 92.184, "grad_norm": 21.341167449951172, "learning_rate": 4.391111111111111e-06, "loss": 0.7902, "step": 11523 }, { "epoch": 92.192, "grad_norm": 20.19610595703125, "learning_rate": 4.3866666666666665e-06, "loss": 0.9425, "step": 11524 }, { "epoch": 92.2, "grad_norm": 63.1011962890625, "learning_rate": 4.382222222222222e-06, "loss": 0.7707, "step": 11525 }, { "epoch": 92.208, "grad_norm": 24.94356918334961, "learning_rate": 4.377777777777778e-06, "loss": 0.7373, "step": 11526 }, { "epoch": 92.216, "grad_norm": 17.57794189453125, "learning_rate": 4.3733333333333335e-06, "loss": 0.5773, "step": 11527 }, { "epoch": 92.224, "grad_norm": 18.554410934448242, "learning_rate": 4.368888888888889e-06, "loss": 1.005, "step": 11528 }, { "epoch": 92.232, "grad_norm": 32.84150695800781, "learning_rate": 4.364444444444445e-06, "loss": 0.5407, "step": 11529 }, { "epoch": 92.24, "grad_norm": 17.773876190185547, "learning_rate": 4.360000000000001e-06, "loss": 0.5472, "step": 11530 }, { "epoch": 92.248, "grad_norm": 47.38398742675781, "learning_rate": 4.3555555555555555e-06, "loss": 1.0515, "step": 11531 }, { "epoch": 92.256, "grad_norm": 342.54644775390625, "learning_rate": 4.351111111111111e-06, "loss": 0.7197, "step": 11532 }, { "epoch": 92.264, "grad_norm": 47.18364334106445, "learning_rate": 4.346666666666667e-06, "loss": 0.7098, "step": 11533 }, { "epoch": 92.272, "grad_norm": 23.80021858215332, "learning_rate": 4.3422222222222225e-06, "loss": 0.5529, "step": 11534 }, { "epoch": 92.28, "grad_norm": 39.02288055419922, "learning_rate": 4.337777777777778e-06, "loss": 1.2841, "step": 11535 }, { "epoch": 92.288, "grad_norm": 54.14097595214844, "learning_rate": 4.333333333333334e-06, "loss": 0.9681, "step": 11536 }, { "epoch": 92.296, "grad_norm": 25.693866729736328, 
"learning_rate": 4.32888888888889e-06, "loss": 0.9221, "step": 11537 }, { "epoch": 92.304, "grad_norm": 26.29315185546875, "learning_rate": 4.3244444444444445e-06, "loss": 0.7201, "step": 11538 }, { "epoch": 92.312, "grad_norm": 15.172576904296875, "learning_rate": 4.32e-06, "loss": 0.9484, "step": 11539 }, { "epoch": 92.32, "grad_norm": 25.845966339111328, "learning_rate": 4.315555555555556e-06, "loss": 0.8212, "step": 11540 }, { "epoch": 92.328, "grad_norm": 23.017871856689453, "learning_rate": 4.3111111111111115e-06, "loss": 0.8364, "step": 11541 }, { "epoch": 92.336, "grad_norm": 30.7033634185791, "learning_rate": 4.306666666666667e-06, "loss": 0.7835, "step": 11542 }, { "epoch": 92.344, "grad_norm": 25.297700881958008, "learning_rate": 4.302222222222223e-06, "loss": 0.9934, "step": 11543 }, { "epoch": 92.352, "grad_norm": 31.606403350830078, "learning_rate": 4.297777777777778e-06, "loss": 1.3919, "step": 11544 }, { "epoch": 92.36, "grad_norm": 29.428627014160156, "learning_rate": 4.2933333333333334e-06, "loss": 0.8051, "step": 11545 }, { "epoch": 92.368, "grad_norm": 68.8193130493164, "learning_rate": 4.288888888888889e-06, "loss": 0.5424, "step": 11546 }, { "epoch": 92.376, "grad_norm": 27.461191177368164, "learning_rate": 4.284444444444444e-06, "loss": 1.9547, "step": 11547 }, { "epoch": 92.384, "grad_norm": 28.393959045410156, "learning_rate": 4.28e-06, "loss": 1.077, "step": 11548 }, { "epoch": 92.392, "grad_norm": 17.45672607421875, "learning_rate": 4.275555555555555e-06, "loss": 0.501, "step": 11549 }, { "epoch": 92.4, "grad_norm": 23.851634979248047, "learning_rate": 4.271111111111111e-06, "loss": 0.7748, "step": 11550 }, { "epoch": 92.408, "grad_norm": 14.388940811157227, "learning_rate": 4.266666666666667e-06, "loss": 0.6989, "step": 11551 }, { "epoch": 92.416, "grad_norm": 26.210023880004883, "learning_rate": 4.2622222222222224e-06, "loss": 1.3871, "step": 11552 }, { "epoch": 92.424, "grad_norm": 49.52701187133789, "learning_rate": 
4.257777777777778e-06, "loss": 0.7969, "step": 11553 }, { "epoch": 92.432, "grad_norm": 13.689579010009766, "learning_rate": 4.253333333333334e-06, "loss": 1.141, "step": 11554 }, { "epoch": 92.44, "grad_norm": 20.868345260620117, "learning_rate": 4.248888888888889e-06, "loss": 0.5855, "step": 11555 }, { "epoch": 92.448, "grad_norm": 25.28548240661621, "learning_rate": 4.244444444444444e-06, "loss": 0.8083, "step": 11556 }, { "epoch": 92.456, "grad_norm": 63.01807403564453, "learning_rate": 4.24e-06, "loss": 0.8622, "step": 11557 }, { "epoch": 92.464, "grad_norm": 32.30759048461914, "learning_rate": 4.235555555555556e-06, "loss": 0.8378, "step": 11558 }, { "epoch": 92.472, "grad_norm": 60.52983093261719, "learning_rate": 4.2311111111111114e-06, "loss": 0.875, "step": 11559 }, { "epoch": 92.48, "grad_norm": 15.977206230163574, "learning_rate": 4.226666666666667e-06, "loss": 0.8133, "step": 11560 }, { "epoch": 92.488, "grad_norm": 42.36701583862305, "learning_rate": 4.222222222222223e-06, "loss": 0.8086, "step": 11561 }, { "epoch": 92.496, "grad_norm": 20.088226318359375, "learning_rate": 4.2177777777777785e-06, "loss": 0.6773, "step": 11562 }, { "epoch": 92.504, "grad_norm": 25.380382537841797, "learning_rate": 4.213333333333333e-06, "loss": 0.5611, "step": 11563 }, { "epoch": 92.512, "grad_norm": 14.673843383789062, "learning_rate": 4.208888888888889e-06, "loss": 0.6692, "step": 11564 }, { "epoch": 92.52, "grad_norm": 21.81538200378418, "learning_rate": 4.204444444444445e-06, "loss": 0.4808, "step": 11565 }, { "epoch": 92.528, "grad_norm": 24.24637222290039, "learning_rate": 4.2000000000000004e-06, "loss": 0.839, "step": 11566 }, { "epoch": 92.536, "grad_norm": 20.671628952026367, "learning_rate": 4.195555555555556e-06, "loss": 1.1221, "step": 11567 }, { "epoch": 92.544, "grad_norm": 24.14348602294922, "learning_rate": 4.191111111111112e-06, "loss": 0.8115, "step": 11568 }, { "epoch": 92.552, "grad_norm": 26.71170425415039, "learning_rate": 4.1866666666666675e-06, 
"loss": 0.7846, "step": 11569 }, { "epoch": 92.56, "grad_norm": 33.903621673583984, "learning_rate": 4.182222222222222e-06, "loss": 0.8161, "step": 11570 }, { "epoch": 92.568, "grad_norm": 16.840024948120117, "learning_rate": 4.177777777777778e-06, "loss": 0.7318, "step": 11571 }, { "epoch": 92.576, "grad_norm": 45.70254898071289, "learning_rate": 4.173333333333333e-06, "loss": 1.6764, "step": 11572 }, { "epoch": 92.584, "grad_norm": 13.842622756958008, "learning_rate": 4.168888888888889e-06, "loss": 0.5236, "step": 11573 }, { "epoch": 92.592, "grad_norm": 36.4721565246582, "learning_rate": 4.164444444444444e-06, "loss": 1.1038, "step": 11574 }, { "epoch": 92.6, "grad_norm": 52.16042709350586, "learning_rate": 4.16e-06, "loss": 0.5987, "step": 11575 }, { "epoch": 92.608, "grad_norm": 15.82248592376709, "learning_rate": 4.155555555555556e-06, "loss": 0.5073, "step": 11576 }, { "epoch": 92.616, "grad_norm": 25.261140823364258, "learning_rate": 4.151111111111111e-06, "loss": 0.7661, "step": 11577 }, { "epoch": 92.624, "grad_norm": 4449.9970703125, "learning_rate": 4.146666666666667e-06, "loss": 1.1502, "step": 11578 }, { "epoch": 92.632, "grad_norm": 31.1435546875, "learning_rate": 4.142222222222223e-06, "loss": 0.7848, "step": 11579 }, { "epoch": 92.64, "grad_norm": 29.557950973510742, "learning_rate": 4.137777777777778e-06, "loss": 0.7593, "step": 11580 }, { "epoch": 92.648, "grad_norm": 40.903717041015625, "learning_rate": 4.133333333333333e-06, "loss": 0.7556, "step": 11581 }, { "epoch": 92.656, "grad_norm": 52.657711029052734, "learning_rate": 4.128888888888889e-06, "loss": 0.509, "step": 11582 }, { "epoch": 92.664, "grad_norm": 18.195938110351562, "learning_rate": 4.124444444444445e-06, "loss": 0.6071, "step": 11583 }, { "epoch": 92.672, "grad_norm": 20.541494369506836, "learning_rate": 4.12e-06, "loss": 1.5174, "step": 11584 }, { "epoch": 92.68, "grad_norm": 18.404930114746094, "learning_rate": 4.115555555555556e-06, "loss": 0.897, "step": 11585 }, { "epoch": 
92.688, "grad_norm": 130.6682586669922, "learning_rate": 4.111111111111112e-06, "loss": 0.7085, "step": 11586 }, { "epoch": 92.696, "grad_norm": 37.33238220214844, "learning_rate": 4.106666666666667e-06, "loss": 0.9864, "step": 11587 }, { "epoch": 92.704, "grad_norm": 22.16082763671875, "learning_rate": 4.102222222222222e-06, "loss": 0.9421, "step": 11588 }, { "epoch": 92.712, "grad_norm": 24.579509735107422, "learning_rate": 4.097777777777778e-06, "loss": 0.7905, "step": 11589 }, { "epoch": 92.72, "grad_norm": 18.7377872467041, "learning_rate": 4.093333333333334e-06, "loss": 0.5362, "step": 11590 }, { "epoch": 92.728, "grad_norm": 17.334400177001953, "learning_rate": 4.088888888888889e-06, "loss": 0.6606, "step": 11591 }, { "epoch": 92.736, "grad_norm": 22.410085678100586, "learning_rate": 4.084444444444445e-06, "loss": 0.7327, "step": 11592 }, { "epoch": 92.744, "grad_norm": 18.548660278320312, "learning_rate": 4.080000000000001e-06, "loss": 0.6107, "step": 11593 }, { "epoch": 92.752, "grad_norm": 17.06512451171875, "learning_rate": 4.0755555555555564e-06, "loss": 0.8271, "step": 11594 }, { "epoch": 92.76, "grad_norm": 33.680850982666016, "learning_rate": 4.071111111111111e-06, "loss": 0.6003, "step": 11595 }, { "epoch": 92.768, "grad_norm": 26.016740798950195, "learning_rate": 4.066666666666666e-06, "loss": 0.8085, "step": 11596 }, { "epoch": 92.776, "grad_norm": 21.892690658569336, "learning_rate": 4.062222222222222e-06, "loss": 0.8824, "step": 11597 }, { "epoch": 92.784, "grad_norm": 17.993030548095703, "learning_rate": 4.0577777777777775e-06, "loss": 0.633, "step": 11598 }, { "epoch": 92.792, "grad_norm": 33.91016387939453, "learning_rate": 4.053333333333333e-06, "loss": 1.2062, "step": 11599 }, { "epoch": 92.8, "grad_norm": 19.937255859375, "learning_rate": 4.048888888888889e-06, "loss": 0.572, "step": 11600 }, { "epoch": 92.808, "grad_norm": 172.8030242919922, "learning_rate": 4.044444444444445e-06, "loss": 0.62, "step": 11601 }, { "epoch": 92.816, 
"grad_norm": 25.712007522583008, "learning_rate": 4.04e-06, "loss": 0.8192, "step": 11602 }, { "epoch": 92.824, "grad_norm": 37.41301345825195, "learning_rate": 4.035555555555556e-06, "loss": 0.7174, "step": 11603 }, { "epoch": 92.832, "grad_norm": 26.257305145263672, "learning_rate": 4.031111111111111e-06, "loss": 1.0357, "step": 11604 }, { "epoch": 92.84, "grad_norm": 24.42664337158203, "learning_rate": 4.0266666666666665e-06, "loss": 0.836, "step": 11605 }, { "epoch": 92.848, "grad_norm": 21.66649055480957, "learning_rate": 4.022222222222222e-06, "loss": 0.53, "step": 11606 }, { "epoch": 92.856, "grad_norm": 21.187875747680664, "learning_rate": 4.017777777777778e-06, "loss": 0.5684, "step": 11607 }, { "epoch": 92.864, "grad_norm": 20.204084396362305, "learning_rate": 4.013333333333334e-06, "loss": 0.826, "step": 11608 }, { "epoch": 92.872, "grad_norm": 25.547012329101562, "learning_rate": 4.008888888888889e-06, "loss": 0.7117, "step": 11609 }, { "epoch": 92.88, "grad_norm": 17.359500885009766, "learning_rate": 4.004444444444445e-06, "loss": 0.6873, "step": 11610 }, { "epoch": 92.888, "grad_norm": 26.22002410888672, "learning_rate": 4.000000000000001e-06, "loss": 0.5677, "step": 11611 }, { "epoch": 92.896, "grad_norm": 12.983171463012695, "learning_rate": 3.9955555555555555e-06, "loss": 0.8026, "step": 11612 }, { "epoch": 92.904, "grad_norm": 18.553892135620117, "learning_rate": 3.991111111111111e-06, "loss": 0.9711, "step": 11613 }, { "epoch": 92.912, "grad_norm": 44.20850372314453, "learning_rate": 3.986666666666667e-06, "loss": 1.3007, "step": 11614 }, { "epoch": 92.92, "grad_norm": 85.13506317138672, "learning_rate": 3.982222222222223e-06, "loss": 0.7267, "step": 11615 }, { "epoch": 92.928, "grad_norm": 16.72681427001953, "learning_rate": 3.977777777777778e-06, "loss": 0.5816, "step": 11616 }, { "epoch": 92.936, "grad_norm": 22.59049415588379, "learning_rate": 3.973333333333334e-06, "loss": 0.8219, "step": 11617 }, { "epoch": 92.944, "grad_norm": 
23.56014060974121, "learning_rate": 3.96888888888889e-06, "loss": 0.5837, "step": 11618 }, { "epoch": 92.952, "grad_norm": 19.314125061035156, "learning_rate": 3.9644444444444445e-06, "loss": 0.6213, "step": 11619 }, { "epoch": 92.96, "grad_norm": 22.280101776123047, "learning_rate": 3.96e-06, "loss": 0.8945, "step": 11620 }, { "epoch": 92.968, "grad_norm": 63.90970993041992, "learning_rate": 3.955555555555555e-06, "loss": 3.0575, "step": 11621 }, { "epoch": 92.976, "grad_norm": 48.49250030517578, "learning_rate": 3.951111111111111e-06, "loss": 0.9066, "step": 11622 }, { "epoch": 92.984, "grad_norm": 28.911190032958984, "learning_rate": 3.9466666666666664e-06, "loss": 0.9911, "step": 11623 }, { "epoch": 92.992, "grad_norm": 14.94664478302002, "learning_rate": 3.942222222222222e-06, "loss": 0.687, "step": 11624 }, { "epoch": 93.0, "grad_norm": 25.142858505249023, "learning_rate": 3.937777777777778e-06, "loss": 0.7369, "step": 11625 }, { "epoch": 93.0, "eval_loss": 0.9046079516410828, "eval_map": 0.4868, "eval_map_50": 0.8279, "eval_map_75": 0.4728, "eval_map_Coverall": 0.7055, "eval_map_Face_Shield": 0.5953, "eval_map_Gloves": 0.3934, "eval_map_Goggles": 0.2936, "eval_map_Mask": 0.4462, "eval_map_large": 0.7063, "eval_map_medium": 0.3501, "eval_map_small": 0.3957, "eval_mar_1": 0.3601, "eval_mar_10": 0.6042, "eval_mar_100": 0.6148, "eval_mar_100_Coverall": 0.78, "eval_mar_100_Face_Shield": 0.7118, "eval_mar_100_Gloves": 0.5197, "eval_mar_100_Goggles": 0.5281, "eval_mar_100_Mask": 0.5346, "eval_mar_large": 0.8247, "eval_mar_medium": 0.5002, "eval_mar_small": 0.4574, "eval_runtime": 0.9122, "eval_samples_per_second": 31.793, "eval_steps_per_second": 2.193, "step": 11625 }, { "epoch": 93.008, "grad_norm": 20.88857650756836, "learning_rate": 3.9333333333333335e-06, "loss": 0.7019, "step": 11626 }, { "epoch": 93.016, "grad_norm": 16.194246292114258, "learning_rate": 3.928888888888889e-06, "loss": 0.9145, "step": 11627 }, { "epoch": 93.024, "grad_norm": 
41.423011779785156, "learning_rate": 3.924444444444445e-06, "loss": 0.6078, "step": 11628 }, { "epoch": 93.032, "grad_norm": 11.768953323364258, "learning_rate": 3.92e-06, "loss": 0.7764, "step": 11629 }, { "epoch": 93.04, "grad_norm": 25.366641998291016, "learning_rate": 3.9155555555555554e-06, "loss": 0.9014, "step": 11630 }, { "epoch": 93.048, "grad_norm": 77.02589416503906, "learning_rate": 3.911111111111111e-06, "loss": 2.2583, "step": 11631 }, { "epoch": 93.056, "grad_norm": 30.493154525756836, "learning_rate": 3.906666666666667e-06, "loss": 0.6573, "step": 11632 }, { "epoch": 93.064, "grad_norm": 56.98748016357422, "learning_rate": 3.9022222222222225e-06, "loss": 0.4537, "step": 11633 }, { "epoch": 93.072, "grad_norm": 22.88761329650879, "learning_rate": 3.897777777777778e-06, "loss": 0.9626, "step": 11634 }, { "epoch": 93.08, "grad_norm": 17.311338424682617, "learning_rate": 3.893333333333334e-06, "loss": 1.0299, "step": 11635 }, { "epoch": 93.088, "grad_norm": 23.67597770690918, "learning_rate": 3.888888888888889e-06, "loss": 0.9101, "step": 11636 }, { "epoch": 93.096, "grad_norm": 9.959199905395508, "learning_rate": 3.8844444444444444e-06, "loss": 0.596, "step": 11637 }, { "epoch": 93.104, "grad_norm": 17.82994842529297, "learning_rate": 3.88e-06, "loss": 0.4881, "step": 11638 }, { "epoch": 93.112, "grad_norm": 20.030237197875977, "learning_rate": 3.875555555555556e-06, "loss": 0.7268, "step": 11639 }, { "epoch": 93.12, "grad_norm": 29.17217445373535, "learning_rate": 3.8711111111111115e-06, "loss": 0.7965, "step": 11640 }, { "epoch": 93.128, "grad_norm": 25.77948760986328, "learning_rate": 3.866666666666667e-06, "loss": 0.9493, "step": 11641 }, { "epoch": 93.136, "grad_norm": 35.47232437133789, "learning_rate": 3.862222222222223e-06, "loss": 0.6621, "step": 11642 }, { "epoch": 93.144, "grad_norm": 16.81330108642578, "learning_rate": 3.857777777777779e-06, "loss": 0.6638, "step": 11643 }, { "epoch": 93.152, "grad_norm": 18.854812622070312, 
"learning_rate": 3.8533333333333334e-06, "loss": 1.7349, "step": 11644 }, { "epoch": 93.16, "grad_norm": 12.677111625671387, "learning_rate": 3.848888888888889e-06, "loss": 0.5547, "step": 11645 }, { "epoch": 93.168, "grad_norm": 97.35076904296875, "learning_rate": 3.844444444444445e-06, "loss": 0.6114, "step": 11646 }, { "epoch": 93.176, "grad_norm": 23.03171157836914, "learning_rate": 3.84e-06, "loss": 0.8459, "step": 11647 }, { "epoch": 93.184, "grad_norm": 20.598012924194336, "learning_rate": 3.835555555555555e-06, "loss": 0.8127, "step": 11648 }, { "epoch": 93.192, "grad_norm": 19.011184692382812, "learning_rate": 3.831111111111111e-06, "loss": 0.8869, "step": 11649 }, { "epoch": 93.2, "grad_norm": 15.73469066619873, "learning_rate": 3.826666666666667e-06, "loss": 1.0794, "step": 11650 }, { "epoch": 93.208, "grad_norm": 36.78208541870117, "learning_rate": 3.8222222222222224e-06, "loss": 0.8915, "step": 11651 }, { "epoch": 93.216, "grad_norm": 36.76395797729492, "learning_rate": 3.817777777777778e-06, "loss": 0.6974, "step": 11652 }, { "epoch": 93.224, "grad_norm": 21.46438980102539, "learning_rate": 3.8133333333333334e-06, "loss": 0.7141, "step": 11653 }, { "epoch": 93.232, "grad_norm": 38.87773895263672, "learning_rate": 3.808888888888889e-06, "loss": 0.6026, "step": 11654 }, { "epoch": 93.24, "grad_norm": 18.658126831054688, "learning_rate": 3.8044444444444443e-06, "loss": 0.8126, "step": 11655 }, { "epoch": 93.248, "grad_norm": 22.730512619018555, "learning_rate": 3.8e-06, "loss": 1.2698, "step": 11656 }, { "epoch": 93.256, "grad_norm": 46.346435546875, "learning_rate": 3.7955555555555557e-06, "loss": 0.8935, "step": 11657 }, { "epoch": 93.264, "grad_norm": 78.19539642333984, "learning_rate": 3.7911111111111114e-06, "loss": 0.5789, "step": 11658 }, { "epoch": 93.272, "grad_norm": 27.068540573120117, "learning_rate": 3.7866666666666667e-06, "loss": 0.936, "step": 11659 }, { "epoch": 93.28, "grad_norm": 12.067384719848633, "learning_rate": 
3.7822222222222224e-06, "loss": 0.6034, "step": 11660 }, { "epoch": 93.288, "grad_norm": 45.10688400268555, "learning_rate": 3.777777777777778e-06, "loss": 0.6936, "step": 11661 }, { "epoch": 93.296, "grad_norm": 35.09569549560547, "learning_rate": 3.7733333333333338e-06, "loss": 0.7616, "step": 11662 }, { "epoch": 93.304, "grad_norm": 44.48383712768555, "learning_rate": 3.768888888888889e-06, "loss": 1.1035, "step": 11663 }, { "epoch": 93.312, "grad_norm": 17.114917755126953, "learning_rate": 3.7644444444444447e-06, "loss": 0.7337, "step": 11664 }, { "epoch": 93.32, "grad_norm": 188.7429962158203, "learning_rate": 3.7600000000000004e-06, "loss": 0.6764, "step": 11665 }, { "epoch": 93.328, "grad_norm": 35.55832290649414, "learning_rate": 3.755555555555556e-06, "loss": 0.5208, "step": 11666 }, { "epoch": 93.336, "grad_norm": 14.055545806884766, "learning_rate": 3.7511111111111114e-06, "loss": 0.6089, "step": 11667 }, { "epoch": 93.344, "grad_norm": 23.689807891845703, "learning_rate": 3.746666666666667e-06, "loss": 0.9124, "step": 11668 }, { "epoch": 93.352, "grad_norm": 58.431373596191406, "learning_rate": 3.7422222222222228e-06, "loss": 0.9684, "step": 11669 }, { "epoch": 93.36, "grad_norm": 28.318973541259766, "learning_rate": 3.737777777777778e-06, "loss": 0.6712, "step": 11670 }, { "epoch": 93.368, "grad_norm": 29.193204879760742, "learning_rate": 3.7333333333333337e-06, "loss": 0.9022, "step": 11671 }, { "epoch": 93.376, "grad_norm": 22.9580020904541, "learning_rate": 3.7288888888888894e-06, "loss": 0.7238, "step": 11672 }, { "epoch": 93.384, "grad_norm": 18.32887077331543, "learning_rate": 3.7244444444444443e-06, "loss": 1.0103, "step": 11673 }, { "epoch": 93.392, "grad_norm": 38.8145637512207, "learning_rate": 3.72e-06, "loss": 0.9679, "step": 11674 }, { "epoch": 93.4, "grad_norm": 53.03114700317383, "learning_rate": 3.7155555555555557e-06, "loss": 1.201, "step": 11675 }, { "epoch": 93.408, "grad_norm": 32.779666900634766, "learning_rate": 
3.711111111111111e-06, "loss": 1.225, "step": 11676 }, { "epoch": 93.416, "grad_norm": 29.273374557495117, "learning_rate": 3.7066666666666666e-06, "loss": 1.0158, "step": 11677 }, { "epoch": 93.424, "grad_norm": 32.00001525878906, "learning_rate": 3.7022222222222223e-06, "loss": 0.5852, "step": 11678 }, { "epoch": 93.432, "grad_norm": 20.021696090698242, "learning_rate": 3.697777777777778e-06, "loss": 0.9836, "step": 11679 }, { "epoch": 93.44, "grad_norm": 19.662046432495117, "learning_rate": 3.6933333333333333e-06, "loss": 0.7942, "step": 11680 }, { "epoch": 93.448, "grad_norm": 14.761152267456055, "learning_rate": 3.688888888888889e-06, "loss": 0.6452, "step": 11681 }, { "epoch": 93.456, "grad_norm": 26.009990692138672, "learning_rate": 3.6844444444444446e-06, "loss": 0.6593, "step": 11682 }, { "epoch": 93.464, "grad_norm": 30.305574417114258, "learning_rate": 3.68e-06, "loss": 0.6622, "step": 11683 }, { "epoch": 93.472, "grad_norm": 21.144330978393555, "learning_rate": 3.6755555555555556e-06, "loss": 0.8846, "step": 11684 }, { "epoch": 93.48, "grad_norm": 22.345258712768555, "learning_rate": 3.6711111111111113e-06, "loss": 0.7021, "step": 11685 }, { "epoch": 93.488, "grad_norm": 18.662063598632812, "learning_rate": 3.666666666666667e-06, "loss": 0.4928, "step": 11686 }, { "epoch": 93.496, "grad_norm": 18.99493408203125, "learning_rate": 3.6622222222222223e-06, "loss": 0.7586, "step": 11687 }, { "epoch": 93.504, "grad_norm": 22.965173721313477, "learning_rate": 3.657777777777778e-06, "loss": 0.8223, "step": 11688 }, { "epoch": 93.512, "grad_norm": 17.556779861450195, "learning_rate": 3.6533333333333336e-06, "loss": 0.6693, "step": 11689 }, { "epoch": 93.52, "grad_norm": 28.518753051757812, "learning_rate": 3.6488888888888893e-06, "loss": 0.5684, "step": 11690 }, { "epoch": 93.528, "grad_norm": 14.519168853759766, "learning_rate": 3.6444444444444446e-06, "loss": 0.8617, "step": 11691 }, { "epoch": 93.536, "grad_norm": 64.19281768798828, "learning_rate": 
3.6400000000000003e-06, "loss": 1.0015, "step": 11692 }, { "epoch": 93.544, "grad_norm": 17.011995315551758, "learning_rate": 3.635555555555556e-06, "loss": 1.0649, "step": 11693 }, { "epoch": 93.552, "grad_norm": 110.97251892089844, "learning_rate": 3.6311111111111117e-06, "loss": 1.2873, "step": 11694 }, { "epoch": 93.56, "grad_norm": 20.705158233642578, "learning_rate": 3.626666666666667e-06, "loss": 0.8587, "step": 11695 }, { "epoch": 93.568, "grad_norm": 25.25958824157715, "learning_rate": 3.6222222222222226e-06, "loss": 1.5765, "step": 11696 }, { "epoch": 93.576, "grad_norm": 48.33442687988281, "learning_rate": 3.6177777777777783e-06, "loss": 1.0532, "step": 11697 }, { "epoch": 93.584, "grad_norm": 26.819656372070312, "learning_rate": 3.613333333333334e-06, "loss": 0.8204, "step": 11698 }, { "epoch": 93.592, "grad_norm": 16.895036697387695, "learning_rate": 3.608888888888889e-06, "loss": 0.9889, "step": 11699 }, { "epoch": 93.6, "grad_norm": 30.2108154296875, "learning_rate": 3.604444444444444e-06, "loss": 0.6923, "step": 11700 }, { "epoch": 93.608, "grad_norm": 17.832202911376953, "learning_rate": 3.6e-06, "loss": 0.6874, "step": 11701 }, { "epoch": 93.616, "grad_norm": 11.129972457885742, "learning_rate": 3.5955555555555555e-06, "loss": 0.9999, "step": 11702 }, { "epoch": 93.624, "grad_norm": 31.681447982788086, "learning_rate": 3.5911111111111112e-06, "loss": 0.7694, "step": 11703 }, { "epoch": 93.632, "grad_norm": 25.345535278320312, "learning_rate": 3.5866666666666665e-06, "loss": 0.8025, "step": 11704 }, { "epoch": 93.64, "grad_norm": 13.424505233764648, "learning_rate": 3.582222222222222e-06, "loss": 0.8856, "step": 11705 }, { "epoch": 93.648, "grad_norm": 23.13223648071289, "learning_rate": 3.577777777777778e-06, "loss": 0.6657, "step": 11706 }, { "epoch": 93.656, "grad_norm": 19.60457992553711, "learning_rate": 3.5733333333333336e-06, "loss": 0.8169, "step": 11707 }, { "epoch": 93.664, "grad_norm": 66.1398696899414, "learning_rate": 
3.568888888888889e-06, "loss": 0.6817, "step": 11708 }, { "epoch": 93.672, "grad_norm": 17.06294059753418, "learning_rate": 3.5644444444444445e-06, "loss": 0.6733, "step": 11709 }, { "epoch": 93.68, "grad_norm": 14.30293083190918, "learning_rate": 3.5600000000000002e-06, "loss": 0.7181, "step": 11710 }, { "epoch": 93.688, "grad_norm": 23.895946502685547, "learning_rate": 3.555555555555556e-06, "loss": 0.7662, "step": 11711 }, { "epoch": 93.696, "grad_norm": 26.11070442199707, "learning_rate": 3.551111111111111e-06, "loss": 0.7901, "step": 11712 }, { "epoch": 93.704, "grad_norm": 21.637252807617188, "learning_rate": 3.546666666666667e-06, "loss": 0.5147, "step": 11713 }, { "epoch": 93.712, "grad_norm": 30.27178382873535, "learning_rate": 3.5422222222222226e-06, "loss": 0.7808, "step": 11714 }, { "epoch": 93.72, "grad_norm": 20.58403205871582, "learning_rate": 3.5377777777777783e-06, "loss": 0.5849, "step": 11715 }, { "epoch": 93.728, "grad_norm": 30.429672241210938, "learning_rate": 3.5333333333333335e-06, "loss": 0.9953, "step": 11716 }, { "epoch": 93.736, "grad_norm": 29.31330108642578, "learning_rate": 3.5288888888888892e-06, "loss": 1.6869, "step": 11717 }, { "epoch": 93.744, "grad_norm": 482.89837646484375, "learning_rate": 3.524444444444445e-06, "loss": 0.556, "step": 11718 }, { "epoch": 93.752, "grad_norm": 16.139524459838867, "learning_rate": 3.52e-06, "loss": 1.1717, "step": 11719 }, { "epoch": 93.76, "grad_norm": 32.49583435058594, "learning_rate": 3.515555555555556e-06, "loss": 1.7251, "step": 11720 }, { "epoch": 93.768, "grad_norm": 40.31348419189453, "learning_rate": 3.5111111111111116e-06, "loss": 0.6888, "step": 11721 }, { "epoch": 93.776, "grad_norm": 15.991684913635254, "learning_rate": 3.5066666666666673e-06, "loss": 0.8504, "step": 11722 }, { "epoch": 93.784, "grad_norm": 36.29106140136719, "learning_rate": 3.5022222222222225e-06, "loss": 1.0061, "step": 11723 }, { "epoch": 93.792, "grad_norm": 138.13047790527344, "learning_rate": 
3.4977777777777782e-06, "loss": 0.8169, "step": 11724 }, { "epoch": 93.8, "grad_norm": 35.20329666137695, "learning_rate": 3.493333333333333e-06, "loss": 0.7597, "step": 11725 }, { "epoch": 93.808, "grad_norm": 21.543188095092773, "learning_rate": 3.4888888888888888e-06, "loss": 0.8677, "step": 11726 }, { "epoch": 93.816, "grad_norm": 37.55659484863281, "learning_rate": 3.4844444444444444e-06, "loss": 0.7436, "step": 11727 }, { "epoch": 93.824, "grad_norm": 27.647319793701172, "learning_rate": 3.4799999999999997e-06, "loss": 0.779, "step": 11728 }, { "epoch": 93.832, "grad_norm": 16.467174530029297, "learning_rate": 3.4755555555555554e-06, "loss": 0.5288, "step": 11729 }, { "epoch": 93.84, "grad_norm": 36.937217712402344, "learning_rate": 3.471111111111111e-06, "loss": 0.4645, "step": 11730 }, { "epoch": 93.848, "grad_norm": 40.49991989135742, "learning_rate": 3.466666666666667e-06, "loss": 0.7041, "step": 11731 }, { "epoch": 93.856, "grad_norm": 39.537635803222656, "learning_rate": 3.462222222222222e-06, "loss": 0.5793, "step": 11732 }, { "epoch": 93.864, "grad_norm": 21.933090209960938, "learning_rate": 3.4577777777777778e-06, "loss": 0.8792, "step": 11733 }, { "epoch": 93.872, "grad_norm": 36.319583892822266, "learning_rate": 3.4533333333333334e-06, "loss": 0.8553, "step": 11734 }, { "epoch": 93.88, "grad_norm": 12.503832817077637, "learning_rate": 3.448888888888889e-06, "loss": 0.9448, "step": 11735 }, { "epoch": 93.888, "grad_norm": 24.085304260253906, "learning_rate": 3.4444444444444444e-06, "loss": 0.6009, "step": 11736 }, { "epoch": 93.896, "grad_norm": 21.139442443847656, "learning_rate": 3.44e-06, "loss": 0.7567, "step": 11737 }, { "epoch": 93.904, "grad_norm": 34.238624572753906, "learning_rate": 3.435555555555556e-06, "loss": 0.6929, "step": 11738 }, { "epoch": 93.912, "grad_norm": 18.740427017211914, "learning_rate": 3.4311111111111115e-06, "loss": 0.6749, "step": 11739 }, { "epoch": 93.92, "grad_norm": 74.07630920410156, "learning_rate": 
3.4266666666666668e-06, "loss": 2.7433, "step": 11740 }, { "epoch": 93.928, "grad_norm": 32.90626525878906, "learning_rate": 3.4222222222222224e-06, "loss": 0.946, "step": 11741 }, { "epoch": 93.936, "grad_norm": 36.785404205322266, "learning_rate": 3.417777777777778e-06, "loss": 0.8588, "step": 11742 }, { "epoch": 93.944, "grad_norm": 18.969526290893555, "learning_rate": 3.413333333333334e-06, "loss": 0.832, "step": 11743 }, { "epoch": 93.952, "grad_norm": 16.81471061706543, "learning_rate": 3.408888888888889e-06, "loss": 0.6308, "step": 11744 }, { "epoch": 93.96, "grad_norm": 25.301401138305664, "learning_rate": 3.404444444444445e-06, "loss": 0.6535, "step": 11745 }, { "epoch": 93.968, "grad_norm": 23.81830406188965, "learning_rate": 3.4000000000000005e-06, "loss": 0.8081, "step": 11746 }, { "epoch": 93.976, "grad_norm": 24.87877655029297, "learning_rate": 3.395555555555556e-06, "loss": 0.8557, "step": 11747 }, { "epoch": 93.984, "grad_norm": 33.63627243041992, "learning_rate": 3.3911111111111114e-06, "loss": 1.0201, "step": 11748 }, { "epoch": 93.992, "grad_norm": 18.885868072509766, "learning_rate": 3.386666666666667e-06, "loss": 0.6727, "step": 11749 }, { "epoch": 94.0, "grad_norm": 21.747894287109375, "learning_rate": 3.382222222222222e-06, "loss": 0.9224, "step": 11750 }, { "epoch": 94.0, "eval_loss": 0.8956433534622192, "eval_map": 0.4988, "eval_map_50": 0.8421, "eval_map_75": 0.5306, "eval_map_Coverall": 0.7056, "eval_map_Face_Shield": 0.5866, "eval_map_Gloves": 0.4226, "eval_map_Goggles": 0.3273, "eval_map_Mask": 0.4519, "eval_map_large": 0.7168, "eval_map_medium": 0.3553, "eval_map_small": 0.4199, "eval_mar_1": 0.3702, "eval_mar_10": 0.6084, "eval_mar_100": 0.6212, "eval_mar_100_Coverall": 0.7711, "eval_mar_100_Face_Shield": 0.7059, "eval_mar_100_Gloves": 0.5443, "eval_mar_100_Goggles": 0.55, "eval_mar_100_Mask": 0.5346, "eval_mar_large": 0.8293, "eval_mar_medium": 0.488, "eval_mar_small": 0.474, "eval_runtime": 0.9119, "eval_samples_per_second": 31.802, 
"eval_steps_per_second": 2.193, "step": 11750 }, { "epoch": 94.008, "grad_norm": 27.63968276977539, "learning_rate": 3.3777777777777777e-06, "loss": 0.9906, "step": 11751 }, { "epoch": 94.016, "grad_norm": 18.150461196899414, "learning_rate": 3.3733333333333334e-06, "loss": 0.8389, "step": 11752 }, { "epoch": 94.024, "grad_norm": 22.888996124267578, "learning_rate": 3.3688888888888886e-06, "loss": 0.8891, "step": 11753 }, { "epoch": 94.032, "grad_norm": 16.381256103515625, "learning_rate": 3.3644444444444443e-06, "loss": 0.5962, "step": 11754 }, { "epoch": 94.04, "grad_norm": 16.977764129638672, "learning_rate": 3.36e-06, "loss": 0.5872, "step": 11755 }, { "epoch": 94.048, "grad_norm": 28.70285987854004, "learning_rate": 3.3555555555555557e-06, "loss": 0.8451, "step": 11756 }, { "epoch": 94.056, "grad_norm": 39.28579330444336, "learning_rate": 3.351111111111111e-06, "loss": 0.8826, "step": 11757 }, { "epoch": 94.064, "grad_norm": 33.916664123535156, "learning_rate": 3.3466666666666667e-06, "loss": 0.7975, "step": 11758 }, { "epoch": 94.072, "grad_norm": 23.045061111450195, "learning_rate": 3.3422222222222224e-06, "loss": 0.725, "step": 11759 }, { "epoch": 94.08, "grad_norm": 24.983428955078125, "learning_rate": 3.337777777777778e-06, "loss": 0.6115, "step": 11760 }, { "epoch": 94.088, "grad_norm": 23.75670051574707, "learning_rate": 3.3333333333333333e-06, "loss": 0.8133, "step": 11761 }, { "epoch": 94.096, "grad_norm": 17.399099349975586, "learning_rate": 3.328888888888889e-06, "loss": 0.6531, "step": 11762 }, { "epoch": 94.104, "grad_norm": 34.07094955444336, "learning_rate": 3.3244444444444447e-06, "loss": 0.6973, "step": 11763 }, { "epoch": 94.112, "grad_norm": 12.947534561157227, "learning_rate": 3.3200000000000004e-06, "loss": 0.7692, "step": 11764 }, { "epoch": 94.12, "grad_norm": 68.51683807373047, "learning_rate": 3.3155555555555557e-06, "loss": 0.4666, "step": 11765 }, { "epoch": 94.128, "grad_norm": 28.668912887573242, "learning_rate": 
3.3111111111111114e-06, "loss": 1.0267, "step": 11766 }, { "epoch": 94.136, "grad_norm": 23.56131935119629, "learning_rate": 3.306666666666667e-06, "loss": 0.655, "step": 11767 }, { "epoch": 94.144, "grad_norm": 31.390336990356445, "learning_rate": 3.3022222222222223e-06, "loss": 0.621, "step": 11768 }, { "epoch": 94.152, "grad_norm": 13.25314998626709, "learning_rate": 3.297777777777778e-06, "loss": 0.8682, "step": 11769 }, { "epoch": 94.16, "grad_norm": 20.08692741394043, "learning_rate": 3.2933333333333337e-06, "loss": 0.719, "step": 11770 }, { "epoch": 94.168, "grad_norm": 49.08338928222656, "learning_rate": 3.2888888888888894e-06, "loss": 0.5287, "step": 11771 }, { "epoch": 94.176, "grad_norm": 15.106826782226562, "learning_rate": 3.2844444444444447e-06, "loss": 1.1569, "step": 11772 }, { "epoch": 94.184, "grad_norm": 17.663406372070312, "learning_rate": 3.2800000000000004e-06, "loss": 0.7691, "step": 11773 }, { "epoch": 94.192, "grad_norm": 19.307666778564453, "learning_rate": 3.275555555555556e-06, "loss": 0.9957, "step": 11774 }, { "epoch": 94.2, "grad_norm": 29.965097427368164, "learning_rate": 3.2711111111111117e-06, "loss": 1.1338, "step": 11775 }, { "epoch": 94.208, "grad_norm": 22.262828826904297, "learning_rate": 3.2666666666666666e-06, "loss": 0.5258, "step": 11776 }, { "epoch": 94.216, "grad_norm": 19.406469345092773, "learning_rate": 3.262222222222222e-06, "loss": 0.8931, "step": 11777 }, { "epoch": 94.224, "grad_norm": 22.134197235107422, "learning_rate": 3.2577777777777776e-06, "loss": 0.6977, "step": 11778 }, { "epoch": 94.232, "grad_norm": 199.1884765625, "learning_rate": 3.2533333333333332e-06, "loss": 0.633, "step": 11779 }, { "epoch": 94.24, "grad_norm": 29.622371673583984, "learning_rate": 3.248888888888889e-06, "loss": 0.5864, "step": 11780 }, { "epoch": 94.248, "grad_norm": 34.09543991088867, "learning_rate": 3.244444444444444e-06, "loss": 0.7504, "step": 11781 }, { "epoch": 94.256, "grad_norm": 22.3912353515625, "learning_rate": 
3.24e-06, "loss": 0.8234, "step": 11782 }, { "epoch": 94.264, "grad_norm": 32.255008697509766, "learning_rate": 3.2355555555555556e-06, "loss": 0.6545, "step": 11783 }, { "epoch": 94.272, "grad_norm": 95.5281982421875, "learning_rate": 3.2311111111111113e-06, "loss": 0.6755, "step": 11784 }, { "epoch": 94.28, "grad_norm": 18.934253692626953, "learning_rate": 3.2266666666666665e-06, "loss": 0.6931, "step": 11785 }, { "epoch": 94.288, "grad_norm": 16.95374870300293, "learning_rate": 3.2222222222222222e-06, "loss": 0.9769, "step": 11786 }, { "epoch": 94.296, "grad_norm": 47.86785888671875, "learning_rate": 3.217777777777778e-06, "loss": 1.0311, "step": 11787 }, { "epoch": 94.304, "grad_norm": 98.17369079589844, "learning_rate": 3.2133333333333336e-06, "loss": 1.0328, "step": 11788 }, { "epoch": 94.312, "grad_norm": 21.945642471313477, "learning_rate": 3.208888888888889e-06, "loss": 0.5425, "step": 11789 }, { "epoch": 94.32, "grad_norm": 12.495945930480957, "learning_rate": 3.2044444444444446e-06, "loss": 0.5662, "step": 11790 }, { "epoch": 94.328, "grad_norm": 30.283117294311523, "learning_rate": 3.2000000000000003e-06, "loss": 0.8708, "step": 11791 }, { "epoch": 94.336, "grad_norm": 40.454315185546875, "learning_rate": 3.195555555555556e-06, "loss": 0.8748, "step": 11792 }, { "epoch": 94.344, "grad_norm": 14.978402137756348, "learning_rate": 3.1911111111111112e-06, "loss": 0.7677, "step": 11793 }, { "epoch": 94.352, "grad_norm": 23.919498443603516, "learning_rate": 3.186666666666667e-06, "loss": 0.531, "step": 11794 }, { "epoch": 94.36, "grad_norm": 53.933406829833984, "learning_rate": 3.1822222222222226e-06, "loss": 1.7591, "step": 11795 }, { "epoch": 94.368, "grad_norm": 30.560327529907227, "learning_rate": 3.1777777777777783e-06, "loss": 0.6227, "step": 11796 }, { "epoch": 94.376, "grad_norm": 31.20339584350586, "learning_rate": 3.1733333333333336e-06, "loss": 0.5799, "step": 11797 }, { "epoch": 94.384, "grad_norm": 18.700342178344727, "learning_rate": 
3.1688888888888893e-06, "loss": 0.6766, "step": 11798 }, { "epoch": 94.392, "grad_norm": 24.300792694091797, "learning_rate": 3.164444444444445e-06, "loss": 0.7176, "step": 11799 }, { "epoch": 94.4, "grad_norm": 14.575983047485352, "learning_rate": 3.1600000000000007e-06, "loss": 0.6719, "step": 11800 }, { "epoch": 94.408, "grad_norm": 19.393543243408203, "learning_rate": 3.155555555555556e-06, "loss": 0.9524, "step": 11801 }, { "epoch": 94.416, "grad_norm": 17.02318572998047, "learning_rate": 3.1511111111111108e-06, "loss": 1.0736, "step": 11802 }, { "epoch": 94.424, "grad_norm": 19.59430503845215, "learning_rate": 3.1466666666666665e-06, "loss": 0.4707, "step": 11803 }, { "epoch": 94.432, "grad_norm": 22.08465576171875, "learning_rate": 3.142222222222222e-06, "loss": 0.8778, "step": 11804 }, { "epoch": 94.44, "grad_norm": 27.397199630737305, "learning_rate": 3.137777777777778e-06, "loss": 0.7798, "step": 11805 }, { "epoch": 94.448, "grad_norm": 25.06167984008789, "learning_rate": 3.133333333333333e-06, "loss": 1.0795, "step": 11806 }, { "epoch": 94.456, "grad_norm": 14.673229217529297, "learning_rate": 3.128888888888889e-06, "loss": 1.0002, "step": 11807 }, { "epoch": 94.464, "grad_norm": 13.840703964233398, "learning_rate": 3.1244444444444445e-06, "loss": 0.705, "step": 11808 }, { "epoch": 94.472, "grad_norm": 56.655033111572266, "learning_rate": 3.12e-06, "loss": 0.7432, "step": 11809 }, { "epoch": 94.48, "grad_norm": 19.7542724609375, "learning_rate": 3.1155555555555555e-06, "loss": 0.5287, "step": 11810 }, { "epoch": 94.488, "grad_norm": 27.158864974975586, "learning_rate": 3.111111111111111e-06, "loss": 0.9365, "step": 11811 }, { "epoch": 94.496, "grad_norm": 34.169403076171875, "learning_rate": 3.106666666666667e-06, "loss": 0.7682, "step": 11812 }, { "epoch": 94.504, "grad_norm": 15.160126686096191, "learning_rate": 3.1022222222222225e-06, "loss": 0.8855, "step": 11813 }, { "epoch": 94.512, "grad_norm": 31.775415420532227, "learning_rate": 
3.097777777777778e-06, "loss": 0.9956, "step": 11814 }, { "epoch": 94.52, "grad_norm": 68.8611068725586, "learning_rate": 3.0933333333333335e-06, "loss": 0.8546, "step": 11815 }, { "epoch": 94.528, "grad_norm": 321.7279968261719, "learning_rate": 3.088888888888889e-06, "loss": 1.4608, "step": 11816 }, { "epoch": 94.536, "grad_norm": 43.512725830078125, "learning_rate": 3.0844444444444445e-06, "loss": 0.7086, "step": 11817 }, { "epoch": 94.544, "grad_norm": 30.826602935791016, "learning_rate": 3.08e-06, "loss": 0.64, "step": 11818 }, { "epoch": 94.552, "grad_norm": 46.23755645751953, "learning_rate": 3.075555555555556e-06, "loss": 0.9276, "step": 11819 }, { "epoch": 94.56, "grad_norm": 25.05728530883789, "learning_rate": 3.0711111111111115e-06, "loss": 0.5573, "step": 11820 }, { "epoch": 94.568, "grad_norm": 17.63544464111328, "learning_rate": 3.066666666666667e-06, "loss": 0.9109, "step": 11821 }, { "epoch": 94.576, "grad_norm": 28.544912338256836, "learning_rate": 3.062222222222222e-06, "loss": 1.5513, "step": 11822 }, { "epoch": 94.584, "grad_norm": 85.21575927734375, "learning_rate": 3.0577777777777778e-06, "loss": 0.805, "step": 11823 }, { "epoch": 94.592, "grad_norm": 39.95841598510742, "learning_rate": 3.0533333333333335e-06, "loss": 1.0402, "step": 11824 }, { "epoch": 94.6, "grad_norm": 60.266387939453125, "learning_rate": 3.048888888888889e-06, "loss": 0.9391, "step": 11825 }, { "epoch": 94.608, "grad_norm": 63.41637420654297, "learning_rate": 3.0444444444444444e-06, "loss": 1.841, "step": 11826 }, { "epoch": 94.616, "grad_norm": 18.822452545166016, "learning_rate": 3.04e-06, "loss": 0.6796, "step": 11827 }, { "epoch": 94.624, "grad_norm": 23.262178421020508, "learning_rate": 3.035555555555556e-06, "loss": 0.6922, "step": 11828 }, { "epoch": 94.632, "grad_norm": 14.5184326171875, "learning_rate": 3.0311111111111115e-06, "loss": 0.574, "step": 11829 }, { "epoch": 94.64, "grad_norm": 25.482452392578125, "learning_rate": 3.0266666666666668e-06, "loss": 2.5218, 
"step": 11830 }, { "epoch": 94.648, "grad_norm": 13.027210235595703, "learning_rate": 3.0222222222222225e-06, "loss": 0.8346, "step": 11831 }, { "epoch": 94.656, "grad_norm": 30.527109146118164, "learning_rate": 3.017777777777778e-06, "loss": 0.7813, "step": 11832 }, { "epoch": 94.664, "grad_norm": 20.41301727294922, "learning_rate": 3.0133333333333334e-06, "loss": 0.7571, "step": 11833 }, { "epoch": 94.672, "grad_norm": 38.61952209472656, "learning_rate": 3.0088888888888887e-06, "loss": 0.9686, "step": 11834 }, { "epoch": 94.68, "grad_norm": 20.080081939697266, "learning_rate": 3.0044444444444444e-06, "loss": 0.6847, "step": 11835 }, { "epoch": 94.688, "grad_norm": 21.165143966674805, "learning_rate": 3e-06, "loss": 0.8558, "step": 11836 }, { "epoch": 94.696, "grad_norm": 28.539457321166992, "learning_rate": 2.9955555555555558e-06, "loss": 0.7623, "step": 11837 }, { "epoch": 94.704, "grad_norm": 27.717689514160156, "learning_rate": 2.991111111111111e-06, "loss": 0.7255, "step": 11838 }, { "epoch": 94.712, "grad_norm": 22.22085189819336, "learning_rate": 2.9866666666666667e-06, "loss": 0.9458, "step": 11839 }, { "epoch": 94.72, "grad_norm": 33.029701232910156, "learning_rate": 2.9822222222222224e-06, "loss": 0.7984, "step": 11840 }, { "epoch": 94.728, "grad_norm": 24.321224212646484, "learning_rate": 2.977777777777778e-06, "loss": 0.7207, "step": 11841 }, { "epoch": 94.736, "grad_norm": 39.19069290161133, "learning_rate": 2.9733333333333334e-06, "loss": 1.5208, "step": 11842 }, { "epoch": 94.744, "grad_norm": 33.239410400390625, "learning_rate": 2.968888888888889e-06, "loss": 0.6061, "step": 11843 }, { "epoch": 94.752, "grad_norm": 22.08411979675293, "learning_rate": 2.9644444444444448e-06, "loss": 1.4091, "step": 11844 }, { "epoch": 94.76, "grad_norm": 25.035614013671875, "learning_rate": 2.9600000000000005e-06, "loss": 0.7392, "step": 11845 }, { "epoch": 94.768, "grad_norm": 18.00655174255371, "learning_rate": 2.9555555555555557e-06, "loss": 1.0816, "step": 11846 
}, { "epoch": 94.776, "grad_norm": 22.86800765991211, "learning_rate": 2.951111111111111e-06, "loss": 1.1118, "step": 11847 }, { "epoch": 94.784, "grad_norm": 17.795644760131836, "learning_rate": 2.9466666666666667e-06, "loss": 0.9141, "step": 11848 }, { "epoch": 94.792, "grad_norm": 17.30558967590332, "learning_rate": 2.9422222222222224e-06, "loss": 0.7418, "step": 11849 }, { "epoch": 94.8, "grad_norm": 21.762643814086914, "learning_rate": 2.9377777777777776e-06, "loss": 0.8343, "step": 11850 }, { "epoch": 94.808, "grad_norm": 15.517979621887207, "learning_rate": 2.9333333333333333e-06, "loss": 0.5955, "step": 11851 }, { "epoch": 94.816, "grad_norm": 37.625038146972656, "learning_rate": 2.928888888888889e-06, "loss": 0.7822, "step": 11852 }, { "epoch": 94.824, "grad_norm": 21.053674697875977, "learning_rate": 2.9244444444444447e-06, "loss": 0.8162, "step": 11853 }, { "epoch": 94.832, "grad_norm": 24.269685745239258, "learning_rate": 2.92e-06, "loss": 0.8661, "step": 11854 }, { "epoch": 94.84, "grad_norm": 104.52572631835938, "learning_rate": 2.9155555555555557e-06, "loss": 1.9526, "step": 11855 }, { "epoch": 94.848, "grad_norm": 606.9375610351562, "learning_rate": 2.9111111111111114e-06, "loss": 0.6927, "step": 11856 }, { "epoch": 94.856, "grad_norm": 22.077167510986328, "learning_rate": 2.906666666666667e-06, "loss": 0.4769, "step": 11857 }, { "epoch": 94.864, "grad_norm": 15.207608222961426, "learning_rate": 2.9022222222222223e-06, "loss": 0.6195, "step": 11858 }, { "epoch": 94.872, "grad_norm": 17.75575065612793, "learning_rate": 2.897777777777778e-06, "loss": 0.8317, "step": 11859 }, { "epoch": 94.88, "grad_norm": 25.506580352783203, "learning_rate": 2.8933333333333333e-06, "loss": 0.8231, "step": 11860 }, { "epoch": 94.888, "grad_norm": 51.97395324707031, "learning_rate": 2.888888888888889e-06, "loss": 0.9944, "step": 11861 }, { "epoch": 94.896, "grad_norm": 43.14693832397461, "learning_rate": 2.8844444444444447e-06, "loss": 1.1372, "step": 11862 }, { 
"epoch": 94.904, "grad_norm": 111.85075378417969, "learning_rate": 2.88e-06, "loss": 0.6677, "step": 11863 }, { "epoch": 94.912, "grad_norm": 20.23052978515625, "learning_rate": 2.8755555555555556e-06, "loss": 0.7827, "step": 11864 }, { "epoch": 94.92, "grad_norm": 121.375732421875, "learning_rate": 2.8711111111111113e-06, "loss": 0.8933, "step": 11865 }, { "epoch": 94.928, "grad_norm": 13.694391250610352, "learning_rate": 2.8666666666666666e-06, "loss": 0.678, "step": 11866 }, { "epoch": 94.936, "grad_norm": 24.067466735839844, "learning_rate": 2.8622222222222223e-06, "loss": 0.9272, "step": 11867 }, { "epoch": 94.944, "grad_norm": 24.820751190185547, "learning_rate": 2.857777777777778e-06, "loss": 0.5904, "step": 11868 }, { "epoch": 94.952, "grad_norm": 31.681385040283203, "learning_rate": 2.8533333333333337e-06, "loss": 0.8797, "step": 11869 }, { "epoch": 94.96, "grad_norm": 17.48644256591797, "learning_rate": 2.848888888888889e-06, "loss": 0.9529, "step": 11870 }, { "epoch": 94.968, "grad_norm": 55.839054107666016, "learning_rate": 2.8444444444444446e-06, "loss": 0.7393, "step": 11871 }, { "epoch": 94.976, "grad_norm": 12.284046173095703, "learning_rate": 2.8400000000000003e-06, "loss": 0.5497, "step": 11872 }, { "epoch": 94.984, "grad_norm": 11.030294418334961, "learning_rate": 2.8355555555555556e-06, "loss": 0.5455, "step": 11873 }, { "epoch": 94.992, "grad_norm": 24.681209564208984, "learning_rate": 2.8311111111111113e-06, "loss": 0.7932, "step": 11874 }, { "epoch": 95.0, "grad_norm": 24.366125106811523, "learning_rate": 2.8266666666666666e-06, "loss": 0.8744, "step": 11875 }, { "epoch": 95.0, "eval_loss": 0.897971510887146, "eval_map": 0.488, "eval_map_50": 0.8362, "eval_map_75": 0.4935, "eval_map_Coverall": 0.704, "eval_map_Face_Shield": 0.594, "eval_map_Gloves": 0.4105, "eval_map_Goggles": 0.2907, "eval_map_Mask": 0.4407, "eval_map_large": 0.6838, "eval_map_medium": 0.3394, "eval_map_small": 0.3833, "eval_mar_1": 0.3598, "eval_mar_10": 0.6088, 
"eval_mar_100": 0.6182, "eval_mar_100_Coverall": 0.7778, "eval_mar_100_Face_Shield": 0.7176, "eval_mar_100_Gloves": 0.5311, "eval_mar_100_Goggles": 0.5281, "eval_mar_100_Mask": 0.5365, "eval_mar_large": 0.813, "eval_mar_medium": 0.4781, "eval_mar_small": 0.4574, "eval_runtime": 0.9107, "eval_samples_per_second": 31.845, "eval_steps_per_second": 2.196, "step": 11875 }, { "epoch": 95.008, "grad_norm": 17.696622848510742, "learning_rate": 2.8222222222222223e-06, "loss": 0.8411, "step": 11876 }, { "epoch": 95.016, "grad_norm": 24.705514907836914, "learning_rate": 2.817777777777778e-06, "loss": 0.5956, "step": 11877 }, { "epoch": 95.024, "grad_norm": 15.466407775878906, "learning_rate": 2.8133333333333336e-06, "loss": 0.6367, "step": 11878 }, { "epoch": 95.032, "grad_norm": 314.6812744140625, "learning_rate": 2.808888888888889e-06, "loss": 0.8696, "step": 11879 }, { "epoch": 95.04, "grad_norm": 18.095882415771484, "learning_rate": 2.8044444444444446e-06, "loss": 0.5902, "step": 11880 }, { "epoch": 95.048, "grad_norm": 36.046634674072266, "learning_rate": 2.8000000000000003e-06, "loss": 0.9293, "step": 11881 }, { "epoch": 95.056, "grad_norm": 28.158479690551758, "learning_rate": 2.7955555555555556e-06, "loss": 0.884, "step": 11882 }, { "epoch": 95.064, "grad_norm": 18.871871948242188, "learning_rate": 2.7911111111111113e-06, "loss": 0.6766, "step": 11883 }, { "epoch": 95.072, "grad_norm": 16.33536148071289, "learning_rate": 2.786666666666667e-06, "loss": 0.7356, "step": 11884 }, { "epoch": 95.08, "grad_norm": 10.177690505981445, "learning_rate": 2.7822222222222226e-06, "loss": 0.7517, "step": 11885 }, { "epoch": 95.088, "grad_norm": 15.781174659729004, "learning_rate": 2.777777777777778e-06, "loss": 1.0112, "step": 11886 }, { "epoch": 95.096, "grad_norm": 45.41170883178711, "learning_rate": 2.773333333333333e-06, "loss": 0.7807, "step": 11887 }, { "epoch": 95.104, "grad_norm": 18.709562301635742, "learning_rate": 2.768888888888889e-06, "loss": 0.6872, "step": 11888 }, { 
"epoch": 95.112, "grad_norm": 40.458797454833984, "learning_rate": 2.7644444444444446e-06, "loss": 0.6918, "step": 11889 }, { "epoch": 95.12, "grad_norm": 22.512250900268555, "learning_rate": 2.7600000000000003e-06, "loss": 0.7914, "step": 11890 }, { "epoch": 95.128, "grad_norm": 14.66784954071045, "learning_rate": 2.7555555555555555e-06, "loss": 0.677, "step": 11891 }, { "epoch": 95.136, "grad_norm": 35.452972412109375, "learning_rate": 2.7511111111111112e-06, "loss": 0.6187, "step": 11892 }, { "epoch": 95.144, "grad_norm": 22.514507293701172, "learning_rate": 2.746666666666667e-06, "loss": 0.8806, "step": 11893 }, { "epoch": 95.152, "grad_norm": 32.1456413269043, "learning_rate": 2.7422222222222226e-06, "loss": 0.7938, "step": 11894 }, { "epoch": 95.16, "grad_norm": 17.632204055786133, "learning_rate": 2.737777777777778e-06, "loss": 0.7575, "step": 11895 }, { "epoch": 95.168, "grad_norm": 23.129117965698242, "learning_rate": 2.7333333333333336e-06, "loss": 1.096, "step": 11896 }, { "epoch": 95.176, "grad_norm": 17.527740478515625, "learning_rate": 2.7288888888888893e-06, "loss": 0.7934, "step": 11897 }, { "epoch": 95.184, "grad_norm": 25.311256408691406, "learning_rate": 2.724444444444445e-06, "loss": 0.6423, "step": 11898 }, { "epoch": 95.192, "grad_norm": 20.142742156982422, "learning_rate": 2.72e-06, "loss": 0.9464, "step": 11899 }, { "epoch": 95.2, "grad_norm": 26.52008819580078, "learning_rate": 2.7155555555555555e-06, "loss": 0.7117, "step": 11900 }, { "epoch": 95.208, "grad_norm": 21.30486297607422, "learning_rate": 2.711111111111111e-06, "loss": 0.7827, "step": 11901 }, { "epoch": 95.216, "grad_norm": 19.936405181884766, "learning_rate": 2.706666666666667e-06, "loss": 0.6855, "step": 11902 }, { "epoch": 95.224, "grad_norm": 113.1151123046875, "learning_rate": 2.702222222222222e-06, "loss": 0.6132, "step": 11903 }, { "epoch": 95.232, "grad_norm": 26.967939376831055, "learning_rate": 2.697777777777778e-06, "loss": 1.0017, "step": 11904 }, { "epoch": 95.24, 
"grad_norm": 20.208152770996094, "learning_rate": 2.6933333333333335e-06, "loss": 0.7421, "step": 11905 }, { "epoch": 95.248, "grad_norm": 40.60457229614258, "learning_rate": 2.6888888888888892e-06, "loss": 2.4715, "step": 11906 }, { "epoch": 95.256, "grad_norm": 36.22987365722656, "learning_rate": 2.6844444444444445e-06, "loss": 0.8605, "step": 11907 }, { "epoch": 95.264, "grad_norm": 25.77775764465332, "learning_rate": 2.68e-06, "loss": 0.8203, "step": 11908 }, { "epoch": 95.272, "grad_norm": 15.740729331970215, "learning_rate": 2.675555555555556e-06, "loss": 0.5266, "step": 11909 }, { "epoch": 95.28, "grad_norm": 39.82316207885742, "learning_rate": 2.6711111111111116e-06, "loss": 0.5482, "step": 11910 }, { "epoch": 95.288, "grad_norm": 28.583358764648438, "learning_rate": 2.666666666666667e-06, "loss": 0.5151, "step": 11911 }, { "epoch": 95.296, "grad_norm": 25.556594848632812, "learning_rate": 2.662222222222222e-06, "loss": 0.8337, "step": 11912 }, { "epoch": 95.304, "grad_norm": 36.12855529785156, "learning_rate": 2.657777777777778e-06, "loss": 0.8525, "step": 11913 }, { "epoch": 95.312, "grad_norm": 19.30184555053711, "learning_rate": 2.6533333333333335e-06, "loss": 0.5241, "step": 11914 }, { "epoch": 95.32, "grad_norm": 13.615520477294922, "learning_rate": 2.6488888888888888e-06, "loss": 0.8426, "step": 11915 }, { "epoch": 95.328, "grad_norm": 18.306678771972656, "learning_rate": 2.6444444444444444e-06, "loss": 1.1324, "step": 11916 }, { "epoch": 95.336, "grad_norm": 25.319374084472656, "learning_rate": 2.64e-06, "loss": 0.8973, "step": 11917 }, { "epoch": 95.344, "grad_norm": 182.9239959716797, "learning_rate": 2.635555555555556e-06, "loss": 1.0257, "step": 11918 }, { "epoch": 95.352, "grad_norm": 15.54889965057373, "learning_rate": 2.631111111111111e-06, "loss": 0.9611, "step": 11919 }, { "epoch": 95.36, "grad_norm": 28.78251838684082, "learning_rate": 2.6266666666666668e-06, "loss": 0.664, "step": 11920 }, { "epoch": 95.368, "grad_norm": 
23.891183853149414, "learning_rate": 2.6222222222222225e-06, "loss": 0.6484, "step": 11921 }, { "epoch": 95.376, "grad_norm": 16.342788696289062, "learning_rate": 2.617777777777778e-06, "loss": 0.6238, "step": 11922 }, { "epoch": 95.384, "grad_norm": 35.7728157043457, "learning_rate": 2.6133333333333334e-06, "loss": 0.546, "step": 11923 }, { "epoch": 95.392, "grad_norm": 22.428504943847656, "learning_rate": 2.6088888888888887e-06, "loss": 0.6082, "step": 11924 }, { "epoch": 95.4, "grad_norm": 11.375628471374512, "learning_rate": 2.6044444444444444e-06, "loss": 0.6942, "step": 11925 }, { "epoch": 95.408, "grad_norm": 15.866436004638672, "learning_rate": 2.6e-06, "loss": 0.6534, "step": 11926 }, { "epoch": 95.416, "grad_norm": 15.155404090881348, "learning_rate": 2.5955555555555558e-06, "loss": 0.7283, "step": 11927 }, { "epoch": 95.424, "grad_norm": 19.213565826416016, "learning_rate": 2.591111111111111e-06, "loss": 0.8049, "step": 11928 }, { "epoch": 95.432, "grad_norm": 20.40362548828125, "learning_rate": 2.5866666666666667e-06, "loss": 0.7565, "step": 11929 }, { "epoch": 95.44, "grad_norm": 39.02796936035156, "learning_rate": 2.5822222222222224e-06, "loss": 2.0417, "step": 11930 }, { "epoch": 95.448, "grad_norm": 27.806591033935547, "learning_rate": 2.5777777777777777e-06, "loss": 0.6385, "step": 11931 }, { "epoch": 95.456, "grad_norm": 19.539016723632812, "learning_rate": 2.5733333333333334e-06, "loss": 0.5327, "step": 11932 }, { "epoch": 95.464, "grad_norm": 15.403343200683594, "learning_rate": 2.568888888888889e-06, "loss": 0.9274, "step": 11933 }, { "epoch": 95.472, "grad_norm": 17.329147338867188, "learning_rate": 2.5644444444444448e-06, "loss": 0.7447, "step": 11934 }, { "epoch": 95.48, "grad_norm": 32.641632080078125, "learning_rate": 2.56e-06, "loss": 0.9329, "step": 11935 }, { "epoch": 95.488, "grad_norm": 29.321773529052734, "learning_rate": 2.5555555555555557e-06, "loss": 1.258, "step": 11936 }, { "epoch": 95.496, "grad_norm": 23.56381607055664, 
"learning_rate": 2.551111111111111e-06, "loss": 1.0385, "step": 11937 }, { "epoch": 95.504, "grad_norm": 22.00612449645996, "learning_rate": 2.5466666666666667e-06, "loss": 1.0022, "step": 11938 }, { "epoch": 95.512, "grad_norm": 11.926299095153809, "learning_rate": 2.5422222222222224e-06, "loss": 1.1544, "step": 11939 }, { "epoch": 95.52, "grad_norm": 14.703166961669922, "learning_rate": 2.5377777777777777e-06, "loss": 0.8365, "step": 11940 }, { "epoch": 95.528, "grad_norm": 20.875225067138672, "learning_rate": 2.5333333333333334e-06, "loss": 0.6279, "step": 11941 }, { "epoch": 95.536, "grad_norm": 25.00980567932129, "learning_rate": 2.528888888888889e-06, "loss": 2.2095, "step": 11942 }, { "epoch": 95.544, "grad_norm": 30.12405014038086, "learning_rate": 2.5244444444444447e-06, "loss": 0.994, "step": 11943 }, { "epoch": 95.552, "grad_norm": 17.36410140991211, "learning_rate": 2.52e-06, "loss": 0.9801, "step": 11944 }, { "epoch": 95.56, "grad_norm": 21.433879852294922, "learning_rate": 2.5155555555555557e-06, "loss": 0.6396, "step": 11945 }, { "epoch": 95.568, "grad_norm": 34.41949462890625, "learning_rate": 2.5111111111111114e-06, "loss": 0.5667, "step": 11946 }, { "epoch": 95.576, "grad_norm": 19.592947006225586, "learning_rate": 2.506666666666667e-06, "loss": 0.8651, "step": 11947 }, { "epoch": 95.584, "grad_norm": 29.21000862121582, "learning_rate": 2.5022222222222224e-06, "loss": 0.6716, "step": 11948 }, { "epoch": 95.592, "grad_norm": 24.049800872802734, "learning_rate": 2.497777777777778e-06, "loss": 0.6952, "step": 11949 }, { "epoch": 95.6, "grad_norm": 45.150848388671875, "learning_rate": 2.4933333333333333e-06, "loss": 0.8093, "step": 11950 }, { "epoch": 95.608, "grad_norm": 23.684457778930664, "learning_rate": 2.488888888888889e-06, "loss": 0.5703, "step": 11951 }, { "epoch": 95.616, "grad_norm": 29.33142852783203, "learning_rate": 2.4844444444444443e-06, "loss": 0.6901, "step": 11952 }, { "epoch": 95.624, "grad_norm": 18.258699417114258, 
"learning_rate": 2.48e-06, "loss": 1.2907, "step": 11953 }, { "epoch": 95.632, "grad_norm": 48.55375289916992, "learning_rate": 2.4755555555555557e-06, "loss": 0.8366, "step": 11954 }, { "epoch": 95.64, "grad_norm": 20.656696319580078, "learning_rate": 2.4711111111111114e-06, "loss": 0.9268, "step": 11955 }, { "epoch": 95.648, "grad_norm": 13.328208923339844, "learning_rate": 2.4666666666666666e-06, "loss": 0.7916, "step": 11956 }, { "epoch": 95.656, "grad_norm": 26.025577545166016, "learning_rate": 2.4622222222222223e-06, "loss": 0.6813, "step": 11957 }, { "epoch": 95.664, "grad_norm": 20.77742576599121, "learning_rate": 2.457777777777778e-06, "loss": 0.9662, "step": 11958 }, { "epoch": 95.672, "grad_norm": 14.687641143798828, "learning_rate": 2.4533333333333337e-06, "loss": 0.8336, "step": 11959 }, { "epoch": 95.68, "grad_norm": 26.761001586914062, "learning_rate": 2.448888888888889e-06, "loss": 0.7959, "step": 11960 }, { "epoch": 95.688, "grad_norm": 23.82579803466797, "learning_rate": 2.4444444444444447e-06, "loss": 0.8437, "step": 11961 }, { "epoch": 95.696, "grad_norm": 21.80435562133789, "learning_rate": 2.4400000000000004e-06, "loss": 0.6885, "step": 11962 }, { "epoch": 95.704, "grad_norm": 19.734020233154297, "learning_rate": 2.4355555555555556e-06, "loss": 0.6401, "step": 11963 }, { "epoch": 95.712, "grad_norm": 37.65705490112305, "learning_rate": 2.431111111111111e-06, "loss": 0.7871, "step": 11964 }, { "epoch": 95.72, "grad_norm": 18.37726593017578, "learning_rate": 2.4266666666666666e-06, "loss": 1.111, "step": 11965 }, { "epoch": 95.728, "grad_norm": 16.90365982055664, "learning_rate": 2.4222222222222223e-06, "loss": 0.8753, "step": 11966 }, { "epoch": 95.736, "grad_norm": 30.81553077697754, "learning_rate": 2.417777777777778e-06, "loss": 0.8711, "step": 11967 }, { "epoch": 95.744, "grad_norm": 45.551063537597656, "learning_rate": 2.4133333333333332e-06, "loss": 0.6563, "step": 11968 }, { "epoch": 95.752, "grad_norm": 47.35304260253906, 
"learning_rate": 2.408888888888889e-06, "loss": 0.8392, "step": 11969 }, { "epoch": 95.76, "grad_norm": 20.57615852355957, "learning_rate": 2.4044444444444446e-06, "loss": 1.12, "step": 11970 }, { "epoch": 95.768, "grad_norm": 290.1637878417969, "learning_rate": 2.4000000000000003e-06, "loss": 0.9875, "step": 11971 }, { "epoch": 95.776, "grad_norm": 15.955240249633789, "learning_rate": 2.3955555555555556e-06, "loss": 0.9378, "step": 11972 }, { "epoch": 95.784, "grad_norm": 20.770912170410156, "learning_rate": 2.3911111111111113e-06, "loss": 1.0803, "step": 11973 }, { "epoch": 95.792, "grad_norm": 23.664243698120117, "learning_rate": 2.386666666666667e-06, "loss": 0.8857, "step": 11974 }, { "epoch": 95.8, "grad_norm": 16.452564239501953, "learning_rate": 2.3822222222222227e-06, "loss": 0.6487, "step": 11975 }, { "epoch": 95.808, "grad_norm": 18.173368453979492, "learning_rate": 2.377777777777778e-06, "loss": 0.6559, "step": 11976 }, { "epoch": 95.816, "grad_norm": 17.088205337524414, "learning_rate": 2.373333333333333e-06, "loss": 0.6349, "step": 11977 }, { "epoch": 95.824, "grad_norm": 24.682300567626953, "learning_rate": 2.368888888888889e-06, "loss": 0.8959, "step": 11978 }, { "epoch": 95.832, "grad_norm": 73.03694152832031, "learning_rate": 2.3644444444444446e-06, "loss": 0.97, "step": 11979 }, { "epoch": 95.84, "grad_norm": 25.754135131835938, "learning_rate": 2.36e-06, "loss": 0.8062, "step": 11980 }, { "epoch": 95.848, "grad_norm": 31.729734420776367, "learning_rate": 2.3555555555555555e-06, "loss": 0.8728, "step": 11981 }, { "epoch": 95.856, "grad_norm": 14.833914756774902, "learning_rate": 2.3511111111111112e-06, "loss": 0.799, "step": 11982 }, { "epoch": 95.864, "grad_norm": 30.36313819885254, "learning_rate": 2.346666666666667e-06, "loss": 0.6387, "step": 11983 }, { "epoch": 95.872, "grad_norm": 24.27728843688965, "learning_rate": 2.342222222222222e-06, "loss": 0.6526, "step": 11984 }, { "epoch": 95.88, "grad_norm": 20.642086029052734, "learning_rate": 
2.337777777777778e-06, "loss": 0.5883, "step": 11985 }, { "epoch": 95.888, "grad_norm": 24.391328811645508, "learning_rate": 2.3333333333333336e-06, "loss": 0.7857, "step": 11986 }, { "epoch": 95.896, "grad_norm": 38.47848892211914, "learning_rate": 2.3288888888888893e-06, "loss": 1.4692, "step": 11987 }, { "epoch": 95.904, "grad_norm": 39.165771484375, "learning_rate": 2.3244444444444445e-06, "loss": 0.641, "step": 11988 }, { "epoch": 95.912, "grad_norm": 16.2095947265625, "learning_rate": 2.32e-06, "loss": 0.5184, "step": 11989 }, { "epoch": 95.92, "grad_norm": 21.059654235839844, "learning_rate": 2.3155555555555555e-06, "loss": 1.9023, "step": 11990 }, { "epoch": 95.928, "grad_norm": 24.686298370361328, "learning_rate": 2.311111111111111e-06, "loss": 0.6258, "step": 11991 }, { "epoch": 95.936, "grad_norm": 25.2595157623291, "learning_rate": 2.306666666666667e-06, "loss": 0.6006, "step": 11992 }, { "epoch": 95.944, "grad_norm": 45.421226501464844, "learning_rate": 2.302222222222222e-06, "loss": 0.9725, "step": 11993 }, { "epoch": 95.952, "grad_norm": 13.904094696044922, "learning_rate": 2.297777777777778e-06, "loss": 0.8545, "step": 11994 }, { "epoch": 95.96, "grad_norm": 34.482540130615234, "learning_rate": 2.2933333333333335e-06, "loss": 1.0542, "step": 11995 }, { "epoch": 95.968, "grad_norm": 22.893770217895508, "learning_rate": 2.2888888888888892e-06, "loss": 0.9745, "step": 11996 }, { "epoch": 95.976, "grad_norm": 15.60818099975586, "learning_rate": 2.2844444444444445e-06, "loss": 0.7792, "step": 11997 }, { "epoch": 95.984, "grad_norm": 24.321430206298828, "learning_rate": 2.28e-06, "loss": 0.727, "step": 11998 }, { "epoch": 95.992, "grad_norm": 15.596057891845703, "learning_rate": 2.275555555555556e-06, "loss": 1.0099, "step": 11999 }, { "epoch": 96.0, "grad_norm": 23.837221145629883, "learning_rate": 2.271111111111111e-06, "loss": 0.7595, "step": 12000 }, { "epoch": 96.0, "eval_loss": 0.9078301787376404, "eval_map": 0.4927, "eval_map_50": 0.8387, 
"eval_map_75": 0.4954, "eval_map_Coverall": 0.7077, "eval_map_Face_Shield": 0.6051, "eval_map_Gloves": 0.3921, "eval_map_Goggles": 0.3165, "eval_map_Mask": 0.442, "eval_map_large": 0.6742, "eval_map_medium": 0.3444, "eval_map_small": 0.4073, "eval_mar_1": 0.3629, "eval_mar_10": 0.6044, "eval_mar_100": 0.6171, "eval_mar_100_Coverall": 0.7733, "eval_mar_100_Face_Shield": 0.7353, "eval_mar_100_Gloves": 0.5098, "eval_mar_100_Goggles": 0.5344, "eval_mar_100_Mask": 0.5327, "eval_mar_large": 0.7875, "eval_mar_medium": 0.4747, "eval_mar_small": 0.4597, "eval_runtime": 0.9126, "eval_samples_per_second": 31.777, "eval_steps_per_second": 2.192, "step": 12000 }, { "epoch": 96.008, "grad_norm": 20.84703826904297, "learning_rate": 2.266666666666667e-06, "loss": 0.8271, "step": 12001 }, { "epoch": 96.016, "grad_norm": 27.939420700073242, "learning_rate": 2.262222222222222e-06, "loss": 1.2091, "step": 12002 }, { "epoch": 96.024, "grad_norm": 34.861698150634766, "learning_rate": 2.257777777777778e-06, "loss": 0.5703, "step": 12003 }, { "epoch": 96.032, "grad_norm": 21.104700088500977, "learning_rate": 2.2533333333333335e-06, "loss": 0.8824, "step": 12004 }, { "epoch": 96.04, "grad_norm": 29.169984817504883, "learning_rate": 2.2488888888888888e-06, "loss": 0.8843, "step": 12005 }, { "epoch": 96.048, "grad_norm": 20.729793548583984, "learning_rate": 2.2444444444444445e-06, "loss": 0.4535, "step": 12006 }, { "epoch": 96.056, "grad_norm": 28.93807029724121, "learning_rate": 2.24e-06, "loss": 0.6346, "step": 12007 }, { "epoch": 96.064, "grad_norm": 14.050641059875488, "learning_rate": 2.235555555555556e-06, "loss": 0.5349, "step": 12008 }, { "epoch": 96.072, "grad_norm": 25.093231201171875, "learning_rate": 2.231111111111111e-06, "loss": 0.7703, "step": 12009 }, { "epoch": 96.08, "grad_norm": 25.891803741455078, "learning_rate": 2.226666666666667e-06, "loss": 1.0945, "step": 12010 }, { "epoch": 96.088, "grad_norm": 26.399017333984375, "learning_rate": 2.2222222222222225e-06, "loss": 
0.453, "step": 12011 }, { "epoch": 96.096, "grad_norm": 24.146865844726562, "learning_rate": 2.217777777777778e-06, "loss": 0.9186, "step": 12012 }, { "epoch": 96.104, "grad_norm": 33.00803756713867, "learning_rate": 2.2133333333333335e-06, "loss": 0.7723, "step": 12013 }, { "epoch": 96.112, "grad_norm": 27.431243896484375, "learning_rate": 2.208888888888889e-06, "loss": 2.6324, "step": 12014 }, { "epoch": 96.12, "grad_norm": 58.96090316772461, "learning_rate": 2.2044444444444444e-06, "loss": 1.0193, "step": 12015 }, { "epoch": 96.128, "grad_norm": 18.99497413635254, "learning_rate": 2.2e-06, "loss": 0.8097, "step": 12016 }, { "epoch": 96.136, "grad_norm": 36.06401824951172, "learning_rate": 2.1955555555555554e-06, "loss": 1.8202, "step": 12017 }, { "epoch": 96.144, "grad_norm": 19.946765899658203, "learning_rate": 2.191111111111111e-06, "loss": 0.7778, "step": 12018 }, { "epoch": 96.152, "grad_norm": 14.922918319702148, "learning_rate": 2.1866666666666668e-06, "loss": 0.831, "step": 12019 }, { "epoch": 96.16, "grad_norm": 25.00929832458496, "learning_rate": 2.1822222222222225e-06, "loss": 1.0806, "step": 12020 }, { "epoch": 96.168, "grad_norm": 29.221210479736328, "learning_rate": 2.1777777777777777e-06, "loss": 0.8534, "step": 12021 }, { "epoch": 96.176, "grad_norm": 27.793581008911133, "learning_rate": 2.1733333333333334e-06, "loss": 0.7431, "step": 12022 }, { "epoch": 96.184, "grad_norm": 24.00486946105957, "learning_rate": 2.168888888888889e-06, "loss": 0.6966, "step": 12023 }, { "epoch": 96.192, "grad_norm": 20.099136352539062, "learning_rate": 2.164444444444445e-06, "loss": 1.0126, "step": 12024 }, { "epoch": 96.2, "grad_norm": 38.296051025390625, "learning_rate": 2.16e-06, "loss": 0.7087, "step": 12025 }, { "epoch": 96.208, "grad_norm": 15.931771278381348, "learning_rate": 2.1555555555555558e-06, "loss": 0.6623, "step": 12026 }, { "epoch": 96.216, "grad_norm": 31.779924392700195, "learning_rate": 2.1511111111111115e-06, "loss": 1.5721, "step": 12027 }, { 
"epoch": 96.224, "grad_norm": 27.67839813232422, "learning_rate": 2.1466666666666667e-06, "loss": 0.5868, "step": 12028 }, { "epoch": 96.232, "grad_norm": 20.169395446777344, "learning_rate": 2.142222222222222e-06, "loss": 0.9469, "step": 12029 }, { "epoch": 96.24, "grad_norm": 18.198270797729492, "learning_rate": 2.1377777777777777e-06, "loss": 0.3693, "step": 12030 }, { "epoch": 96.248, "grad_norm": 14.616826057434082, "learning_rate": 2.1333333333333334e-06, "loss": 0.9256, "step": 12031 }, { "epoch": 96.256, "grad_norm": 26.008176803588867, "learning_rate": 2.128888888888889e-06, "loss": 0.8381, "step": 12032 }, { "epoch": 96.264, "grad_norm": 17.115266799926758, "learning_rate": 2.1244444444444443e-06, "loss": 0.7206, "step": 12033 }, { "epoch": 96.272, "grad_norm": 33.95196533203125, "learning_rate": 2.12e-06, "loss": 1.0429, "step": 12034 }, { "epoch": 96.28, "grad_norm": 15.435750961303711, "learning_rate": 2.1155555555555557e-06, "loss": 1.0505, "step": 12035 }, { "epoch": 96.288, "grad_norm": 18.828298568725586, "learning_rate": 2.1111111111111114e-06, "loss": 0.9119, "step": 12036 }, { "epoch": 96.296, "grad_norm": 67.00871276855469, "learning_rate": 2.1066666666666667e-06, "loss": 0.6379, "step": 12037 }, { "epoch": 96.304, "grad_norm": 20.526853561401367, "learning_rate": 2.1022222222222224e-06, "loss": 1.066, "step": 12038 }, { "epoch": 96.312, "grad_norm": 26.325519561767578, "learning_rate": 2.097777777777778e-06, "loss": 0.6607, "step": 12039 }, { "epoch": 96.32, "grad_norm": 20.301311492919922, "learning_rate": 2.0933333333333338e-06, "loss": 0.7229, "step": 12040 }, { "epoch": 96.328, "grad_norm": 17.93718910217285, "learning_rate": 2.088888888888889e-06, "loss": 0.6842, "step": 12041 }, { "epoch": 96.336, "grad_norm": 43.163753509521484, "learning_rate": 2.0844444444444443e-06, "loss": 0.7094, "step": 12042 }, { "epoch": 96.344, "grad_norm": 30.324880599975586, "learning_rate": 2.08e-06, "loss": 1.443, "step": 12043 }, { "epoch": 96.352, 
"grad_norm": 18.578998565673828, "learning_rate": 2.0755555555555557e-06, "loss": 0.7673, "step": 12044 }, { "epoch": 96.36, "grad_norm": 14.877270698547363, "learning_rate": 2.0711111111111114e-06, "loss": 0.6608, "step": 12045 }, { "epoch": 96.368, "grad_norm": 50.76593017578125, "learning_rate": 2.0666666666666666e-06, "loss": 0.7079, "step": 12046 }, { "epoch": 96.376, "grad_norm": 21.88093376159668, "learning_rate": 2.0622222222222223e-06, "loss": 0.8762, "step": 12047 }, { "epoch": 96.384, "grad_norm": 16.320863723754883, "learning_rate": 2.057777777777778e-06, "loss": 0.8604, "step": 12048 }, { "epoch": 96.392, "grad_norm": 16.333681106567383, "learning_rate": 2.0533333333333333e-06, "loss": 0.8791, "step": 12049 }, { "epoch": 96.4, "grad_norm": 25.955657958984375, "learning_rate": 2.048888888888889e-06, "loss": 0.6135, "step": 12050 }, { "epoch": 96.408, "grad_norm": 22.71087074279785, "learning_rate": 2.0444444444444447e-06, "loss": 0.8214, "step": 12051 }, { "epoch": 96.416, "grad_norm": 22.535717010498047, "learning_rate": 2.0400000000000004e-06, "loss": 0.933, "step": 12052 }, { "epoch": 96.424, "grad_norm": 27.009057998657227, "learning_rate": 2.0355555555555556e-06, "loss": 0.8671, "step": 12053 }, { "epoch": 96.432, "grad_norm": 16.055023193359375, "learning_rate": 2.031111111111111e-06, "loss": 0.9042, "step": 12054 }, { "epoch": 96.44, "grad_norm": 18.546096801757812, "learning_rate": 2.0266666666666666e-06, "loss": 0.8059, "step": 12055 }, { "epoch": 96.448, "grad_norm": 56.49435806274414, "learning_rate": 2.0222222222222223e-06, "loss": 1.3337, "step": 12056 }, { "epoch": 96.456, "grad_norm": 22.396007537841797, "learning_rate": 2.017777777777778e-06, "loss": 0.6507, "step": 12057 }, { "epoch": 96.464, "grad_norm": 22.933351516723633, "learning_rate": 2.0133333333333333e-06, "loss": 0.8519, "step": 12058 }, { "epoch": 96.472, "grad_norm": 27.72563934326172, "learning_rate": 2.008888888888889e-06, "loss": 0.7511, "step": 12059 }, { "epoch": 96.48, 
"grad_norm": 31.430694580078125, "learning_rate": 2.0044444444444446e-06, "loss": 0.6242, "step": 12060 }, { "epoch": 96.488, "grad_norm": 99.24593353271484, "learning_rate": 2.0000000000000003e-06, "loss": 0.5009, "step": 12061 }, { "epoch": 96.496, "grad_norm": 17.845205307006836, "learning_rate": 1.9955555555555556e-06, "loss": 0.8161, "step": 12062 }, { "epoch": 96.504, "grad_norm": 39.282188415527344, "learning_rate": 1.9911111111111113e-06, "loss": 0.6393, "step": 12063 }, { "epoch": 96.512, "grad_norm": 27.074748992919922, "learning_rate": 1.986666666666667e-06, "loss": 0.8101, "step": 12064 }, { "epoch": 96.52, "grad_norm": 10.409846305847168, "learning_rate": 1.9822222222222223e-06, "loss": 0.9006, "step": 12065 }, { "epoch": 96.528, "grad_norm": 27.534337997436523, "learning_rate": 1.9777777777777775e-06, "loss": 1.0772, "step": 12066 }, { "epoch": 96.536, "grad_norm": 15.82668685913086, "learning_rate": 1.9733333333333332e-06, "loss": 1.1817, "step": 12067 }, { "epoch": 96.544, "grad_norm": 32.098472595214844, "learning_rate": 1.968888888888889e-06, "loss": 0.8301, "step": 12068 }, { "epoch": 96.552, "grad_norm": 60.884403228759766, "learning_rate": 1.9644444444444446e-06, "loss": 0.6784, "step": 12069 }, { "epoch": 96.56, "grad_norm": 17.098785400390625, "learning_rate": 1.96e-06, "loss": 0.6488, "step": 12070 }, { "epoch": 96.568, "grad_norm": 21.898656845092773, "learning_rate": 1.9555555555555556e-06, "loss": 0.7999, "step": 12071 }, { "epoch": 96.576, "grad_norm": 25.43730354309082, "learning_rate": 1.9511111111111113e-06, "loss": 0.6249, "step": 12072 }, { "epoch": 96.584, "grad_norm": 15.65902328491211, "learning_rate": 1.946666666666667e-06, "loss": 0.7878, "step": 12073 }, { "epoch": 96.592, "grad_norm": 23.32025146484375, "learning_rate": 1.9422222222222222e-06, "loss": 1.1771, "step": 12074 }, { "epoch": 96.6, "grad_norm": 20.15871810913086, "learning_rate": 1.937777777777778e-06, "loss": 0.5179, "step": 12075 }, { "epoch": 96.608, 
"grad_norm": 177.97242736816406, "learning_rate": 1.9333333333333336e-06, "loss": 0.7429, "step": 12076 }, { "epoch": 96.616, "grad_norm": 24.299041748046875, "learning_rate": 1.9288888888888893e-06, "loss": 0.3706, "step": 12077 }, { "epoch": 96.624, "grad_norm": 68.49532318115234, "learning_rate": 1.9244444444444446e-06, "loss": 0.6625, "step": 12078 }, { "epoch": 96.632, "grad_norm": 61.6434326171875, "learning_rate": 1.92e-06, "loss": 1.066, "step": 12079 }, { "epoch": 96.64, "grad_norm": 24.463356018066406, "learning_rate": 1.9155555555555555e-06, "loss": 0.6733, "step": 12080 }, { "epoch": 96.648, "grad_norm": 17.81436538696289, "learning_rate": 1.9111111111111112e-06, "loss": 0.6222, "step": 12081 }, { "epoch": 96.656, "grad_norm": 26.458297729492188, "learning_rate": 1.9066666666666667e-06, "loss": 0.6214, "step": 12082 }, { "epoch": 96.664, "grad_norm": 33.07498550415039, "learning_rate": 1.9022222222222222e-06, "loss": 0.9785, "step": 12083 }, { "epoch": 96.672, "grad_norm": 30.18751335144043, "learning_rate": 1.8977777777777779e-06, "loss": 0.8992, "step": 12084 }, { "epoch": 96.68, "grad_norm": 74.44214630126953, "learning_rate": 1.8933333333333333e-06, "loss": 0.4633, "step": 12085 }, { "epoch": 96.688, "grad_norm": 26.271631240844727, "learning_rate": 1.888888888888889e-06, "loss": 0.4589, "step": 12086 }, { "epoch": 96.696, "grad_norm": 21.603530883789062, "learning_rate": 1.8844444444444445e-06, "loss": 0.936, "step": 12087 }, { "epoch": 96.704, "grad_norm": 18.484861373901367, "learning_rate": 1.8800000000000002e-06, "loss": 0.8451, "step": 12088 }, { "epoch": 96.712, "grad_norm": 263.26446533203125, "learning_rate": 1.8755555555555557e-06, "loss": 0.8472, "step": 12089 }, { "epoch": 96.72, "grad_norm": 27.1824893951416, "learning_rate": 1.8711111111111114e-06, "loss": 0.6573, "step": 12090 }, { "epoch": 96.728, "grad_norm": 42.7002067565918, "learning_rate": 1.8666666666666669e-06, "loss": 0.8934, "step": 12091 }, { "epoch": 96.736, "grad_norm": 
33.41100311279297, "learning_rate": 1.8622222222222221e-06, "loss": 0.5809, "step": 12092 }, { "epoch": 96.744, "grad_norm": 15.344292640686035, "learning_rate": 1.8577777777777778e-06, "loss": 0.7466, "step": 12093 }, { "epoch": 96.752, "grad_norm": 17.555158615112305, "learning_rate": 1.8533333333333333e-06, "loss": 0.8735, "step": 12094 }, { "epoch": 96.76, "grad_norm": 20.013473510742188, "learning_rate": 1.848888888888889e-06, "loss": 0.7651, "step": 12095 }, { "epoch": 96.768, "grad_norm": 16.17165756225586, "learning_rate": 1.8444444444444445e-06, "loss": 1.0278, "step": 12096 }, { "epoch": 96.776, "grad_norm": 52.77504348754883, "learning_rate": 1.84e-06, "loss": 1.0955, "step": 12097 }, { "epoch": 96.784, "grad_norm": 29.762075424194336, "learning_rate": 1.8355555555555557e-06, "loss": 1.9751, "step": 12098 }, { "epoch": 96.792, "grad_norm": 23.912105560302734, "learning_rate": 1.8311111111111111e-06, "loss": 0.9736, "step": 12099 }, { "epoch": 96.8, "grad_norm": 14.839146614074707, "learning_rate": 1.8266666666666668e-06, "loss": 1.0348, "step": 12100 }, { "epoch": 96.808, "grad_norm": 38.67601013183594, "learning_rate": 1.8222222222222223e-06, "loss": 0.6638, "step": 12101 }, { "epoch": 96.816, "grad_norm": 16.37600326538086, "learning_rate": 1.817777777777778e-06, "loss": 0.6013, "step": 12102 }, { "epoch": 96.824, "grad_norm": 17.63823127746582, "learning_rate": 1.8133333333333335e-06, "loss": 1.1607, "step": 12103 }, { "epoch": 96.832, "grad_norm": 17.519207000732422, "learning_rate": 1.8088888888888892e-06, "loss": 0.9645, "step": 12104 }, { "epoch": 96.84, "grad_norm": 13.356035232543945, "learning_rate": 1.8044444444444444e-06, "loss": 0.5832, "step": 12105 }, { "epoch": 96.848, "grad_norm": 29.173864364624023, "learning_rate": 1.8e-06, "loss": 0.7003, "step": 12106 }, { "epoch": 96.856, "grad_norm": 13.940608024597168, "learning_rate": 1.7955555555555556e-06, "loss": 0.6651, "step": 12107 }, { "epoch": 96.864, "grad_norm": 21.1694393157959, 
"learning_rate": 1.791111111111111e-06, "loss": 0.6555, "step": 12108 }, { "epoch": 96.872, "grad_norm": 199.64987182617188, "learning_rate": 1.7866666666666668e-06, "loss": 0.6655, "step": 12109 }, { "epoch": 96.88, "grad_norm": 21.235660552978516, "learning_rate": 1.7822222222222223e-06, "loss": 0.92, "step": 12110 }, { "epoch": 96.888, "grad_norm": 14.654531478881836, "learning_rate": 1.777777777777778e-06, "loss": 0.7008, "step": 12111 }, { "epoch": 96.896, "grad_norm": 12.943094253540039, "learning_rate": 1.7733333333333334e-06, "loss": 0.9121, "step": 12112 }, { "epoch": 96.904, "grad_norm": 132.64102172851562, "learning_rate": 1.7688888888888891e-06, "loss": 1.0304, "step": 12113 }, { "epoch": 96.912, "grad_norm": 24.33829689025879, "learning_rate": 1.7644444444444446e-06, "loss": 0.704, "step": 12114 }, { "epoch": 96.92, "grad_norm": 19.178125381469727, "learning_rate": 1.76e-06, "loss": 0.7957, "step": 12115 }, { "epoch": 96.928, "grad_norm": 31.61600112915039, "learning_rate": 1.7555555555555558e-06, "loss": 1.2379, "step": 12116 }, { "epoch": 96.936, "grad_norm": 22.269899368286133, "learning_rate": 1.7511111111111113e-06, "loss": 0.658, "step": 12117 }, { "epoch": 96.944, "grad_norm": 13.648685455322266, "learning_rate": 1.7466666666666665e-06, "loss": 0.7849, "step": 12118 }, { "epoch": 96.952, "grad_norm": 24.61589813232422, "learning_rate": 1.7422222222222222e-06, "loss": 0.6928, "step": 12119 }, { "epoch": 96.96, "grad_norm": 22.44315528869629, "learning_rate": 1.7377777777777777e-06, "loss": 0.768, "step": 12120 }, { "epoch": 96.968, "grad_norm": 26.23053550720215, "learning_rate": 1.7333333333333334e-06, "loss": 0.6998, "step": 12121 }, { "epoch": 96.976, "grad_norm": 34.179813385009766, "learning_rate": 1.7288888888888889e-06, "loss": 0.8687, "step": 12122 }, { "epoch": 96.984, "grad_norm": 22.339113235473633, "learning_rate": 1.7244444444444446e-06, "loss": 1.8491, "step": 12123 }, { "epoch": 96.992, "grad_norm": 14.651497840881348, 
"learning_rate": 1.72e-06, "loss": 0.644, "step": 12124 }, { "epoch": 97.0, "grad_norm": 99.99242401123047, "learning_rate": 1.7155555555555557e-06, "loss": 0.8602, "step": 12125 }, { "epoch": 97.0, "eval_loss": 0.8912756443023682, "eval_map": 0.4887, "eval_map_50": 0.8353, "eval_map_75": 0.473, "eval_map_Coverall": 0.7, "eval_map_Face_Shield": 0.5987, "eval_map_Gloves": 0.3918, "eval_map_Goggles": 0.3115, "eval_map_Mask": 0.4418, "eval_map_large": 0.679, "eval_map_medium": 0.3492, "eval_map_small": 0.4091, "eval_mar_1": 0.3601, "eval_mar_10": 0.6056, "eval_mar_100": 0.6186, "eval_mar_100_Coverall": 0.7689, "eval_mar_100_Face_Shield": 0.7294, "eval_mar_100_Gloves": 0.523, "eval_mar_100_Goggles": 0.5312, "eval_mar_100_Mask": 0.5404, "eval_mar_large": 0.7882, "eval_mar_medium": 0.4922, "eval_mar_small": 0.4686, "eval_runtime": 0.9065, "eval_samples_per_second": 31.992, "eval_steps_per_second": 2.206, "step": 12125 } ], "logging_steps": 1, "max_steps": 12500, "num_input_tokens_seen": 0, "num_train_epochs": 100, "save_steps": 500, "total_flos": 2.607082460495274e+19, "train_batch_size": 8, "trial_name": null, "trial_params": null }