{
"Huggy": {
"checkpoints": [
{
"steps": 199929,
"file_path": "results/Huggy/Huggy/Huggy-199929.onnx",
"reward": 3.3415445499732845,
"creation_time": 1683076861.8904657,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199929.pt"
]
},
{
"steps": 399883,
"file_path": "results/Huggy/Huggy/Huggy-399883.onnx",
"reward": 3.8052695571330557,
"creation_time": 1683077091.8676012,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399883.pt"
]
},
{
"steps": 599945,
"file_path": "results/Huggy/Huggy/Huggy-599945.onnx",
"reward": 3.52013623714447,
"creation_time": 1683077325.142662,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599945.pt"
]
},
{
"steps": 799997,
"file_path": "results/Huggy/Huggy/Huggy-799997.onnx",
"reward": 3.7404542707123802,
"creation_time": 1683077554.5902188,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799997.pt"
]
},
{
"steps": 999993,
"file_path": "results/Huggy/Huggy/Huggy-999993.onnx",
"reward": 3.8868495633938167,
"creation_time": 1683077788.9933617,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999993.pt"
]
},
{
"steps": 1199911,
"file_path": "results/Huggy/Huggy/Huggy-1199911.onnx",
"reward": 3.782593419972588,
"creation_time": 1683078025.7697089,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199911.pt"
]
},
{
"steps": 1399967,
"file_path": "results/Huggy/Huggy/Huggy-1399967.onnx",
"reward": 4.096260403331957,
"creation_time": 1683078261.435922,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399967.pt"
]
},
{
"steps": 1599988,
"file_path": "results/Huggy/Huggy/Huggy-1599988.onnx",
"reward": 4.006368067529467,
"creation_time": 1683078495.4890425,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599988.pt"
]
},
{
"steps": 1799942,
"file_path": "results/Huggy/Huggy/Huggy-1799942.onnx",
"reward": 3.75701681928697,
"creation_time": 1683078735.2737968,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799942.pt"
]
},
{
"steps": 1999925,
"file_path": "results/Huggy/Huggy/Huggy-1999925.onnx",
"reward": 3.6611822348374585,
"creation_time": 1683078973.2199285,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999925.pt"
]
},
{
"steps": 2000019,
"file_path": "results/Huggy/Huggy/Huggy-2000019.onnx",
"reward": 3.680982661969734,
"creation_time": 1683078973.3463302,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000019.pt"
]
}
],
"final_checkpoint": {
"steps": 2000019,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.680982661969734,
"creation_time": 1683078973.3463302,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000019.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}