{
"Huggy": {
"checkpoints": [
{
"steps": 199633,
"file_path": "results/Huggy/Huggy/Huggy-199633.onnx",
"reward": 3.126919717527926,
"creation_time": 1688414227.7764108,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199633.pt"
]
},
{
"steps": 399880,
"file_path": "results/Huggy/Huggy/Huggy-399880.onnx",
"reward": 3.616960549354553,
"creation_time": 1688414481.170646,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399880.pt"
]
},
{
"steps": 599361,
"file_path": "results/Huggy/Huggy/Huggy-599361.onnx",
"reward": 3.199132204055786,
"creation_time": 1688414737.9184253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599361.pt"
]
},
{
"steps": 799971,
"file_path": "results/Huggy/Huggy/Huggy-799971.onnx",
"reward": 4.009585527181625,
"creation_time": 1688414991.031934,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799971.pt"
]
},
{
"steps": 999945,
"file_path": "results/Huggy/Huggy/Huggy-999945.onnx",
"reward": 4.080588065167909,
"creation_time": 1688415251.9749212,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999945.pt"
]
},
{
"steps": 1199984,
"file_path": "results/Huggy/Huggy/Huggy-1199984.onnx",
"reward": 3.755522217069353,
"creation_time": 1688415509.0699077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199984.pt"
]
},
{
"steps": 1399933,
"file_path": "results/Huggy/Huggy/Huggy-1399933.onnx",
"reward": 3.7065288936248932,
"creation_time": 1688415767.8195968,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399933.pt"
]
},
{
"steps": 1599919,
"file_path": "results/Huggy/Huggy/Huggy-1599919.onnx",
"reward": 3.4167518272168107,
"creation_time": 1688416028.5870337,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599919.pt"
]
},
{
"steps": 1799959,
"file_path": "results/Huggy/Huggy/Huggy-1799959.onnx",
"reward": 3.878648983804803,
"creation_time": 1688416292.0841937,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799959.pt"
]
},
{
"steps": 1999622,
"file_path": "results/Huggy/Huggy/Huggy-1999622.onnx",
"reward": null,
"creation_time": 1688416553.6816218,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999622.pt"
]
},
{
"steps": 2000372,
"file_path": "results/Huggy/Huggy/Huggy-2000372.onnx",
"reward": -3.117123603820801,
"creation_time": 1688416553.8328197,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000372.pt"
]
}
],
"final_checkpoint": {
"steps": 2000372,
"file_path": "results/Huggy/Huggy.onnx",
"reward": -3.117123603820801,
"creation_time": 1688416553.8328197,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000372.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}