{ "Huggy": { "checkpoints": [ { "steps": 199862, "file_path": "results/Huggy2/Huggy/Huggy-199862.onnx", "reward": 3.589658560001687, "creation_time": 1730774080.3764265, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-199862.pt" ] }, { "steps": 399900, "file_path": "results/Huggy2/Huggy/Huggy-399900.onnx", "reward": 3.9632623749719538, "creation_time": 1730774327.8020785, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-399900.pt" ] }, { "steps": 599925, "file_path": "results/Huggy2/Huggy/Huggy-599925.onnx", "reward": 3.8018529022994794, "creation_time": 1730774580.9983253, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-599925.pt" ] }, { "steps": 799250, "file_path": "results/Huggy2/Huggy/Huggy-799250.onnx", "reward": 3.919753539562225, "creation_time": 1730774833.367548, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-799250.pt" ] }, { "steps": 999969, "file_path": "results/Huggy2/Huggy/Huggy-999969.onnx", "reward": 3.965879654380637, "creation_time": 1730775092.2089517, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-999969.pt" ] }, { "steps": 1199999, "file_path": "results/Huggy2/Huggy/Huggy-1199999.onnx", "reward": 3.8537183187901976, "creation_time": 1730775343.8608396, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1199999.pt" ] }, { "steps": 1399983, "file_path": "results/Huggy2/Huggy/Huggy-1399983.onnx", "reward": 3.7812981463846613, "creation_time": 1730775592.0871499, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1399983.pt" ] }, { "steps": 1599967, "file_path": "results/Huggy2/Huggy/Huggy-1599967.onnx", "reward": 3.821549704020051, "creation_time": 1730775838.4886434, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1599967.pt" ] }, { "steps": 1799431, "file_path": "results/Huggy2/Huggy/Huggy-1799431.onnx", "reward": 3.466076675118232, "creation_time": 1730776097.372214, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1799431.pt" ] }, { "steps": 1999501, "file_path": "results/Huggy2/Huggy/Huggy-1999501.onnx", "reward": 4.578010390786564, "creation_time": 1730776352.7003596, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-1999501.pt" ] }, { "steps": 2000251, "file_path": "results/Huggy2/Huggy/Huggy-2000251.onnx", "reward": 4.121830251481798, "creation_time": 1730776352.852844, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-2000251.pt" ] } ], "final_checkpoint": { "steps": 2000251, "file_path": "results/Huggy2/Huggy.onnx", "reward": 4.121830251481798, "creation_time": 1730776352.852844, "auxillary_file_paths": [ "results/Huggy2/Huggy/Huggy-2000251.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "1.2.0.dev0", "torch_version": "2.5.0+cu121" } }