ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199835,
                "file_path": "results/Huggy/Huggy/Huggy-199835.onnx",
                "reward": 3.4404318407177925,
                "creation_time": 1692754062.789353,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199835.pt"
                ]
            },
            {
                "steps": 399967,
                "file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
                "reward": 3.531030190907992,
                "creation_time": 1692754302.923248,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399967.pt"
                ]
            },
            {
                "steps": 599989,
                "file_path": "results/Huggy/Huggy/Huggy-599989.onnx",
                "reward": 4.089557139769845,
                "creation_time": 1692754547.5554273,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599989.pt"
                ]
            },
            {
                "steps": 799904,
                "file_path": "results/Huggy/Huggy/Huggy-799904.onnx",
                "reward": 3.3992593901157377,
                "creation_time": 1692754836.6929636,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799904.pt"
                ]
            },
            {
                "steps": 999992,
                "file_path": "results/Huggy/Huggy/Huggy-999992.onnx",
                "reward": 3.42047372092022,
                "creation_time": 1692755103.8634164,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999992.pt"
                ]
            },
            {
                "steps": 1199911,
                "file_path": "results/Huggy/Huggy/Huggy-1199911.onnx",
                "reward": 3.5895238656264086,
                "creation_time": 1692755350.7458205,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199911.pt"
                ]
            },
            {
                "steps": 1399899,
                "file_path": "results/Huggy/Huggy/Huggy-1399899.onnx",
                "reward": 3.6047694995893655,
                "creation_time": 1692755593.74,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399899.pt"
                ]
            },
            {
                "steps": 1599880,
                "file_path": "results/Huggy/Huggy/Huggy-1599880.onnx",
                "reward": 3.558824708967498,
                "creation_time": 1692755849.3818486,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599880.pt"
                ]
            },
            {
                "steps": 1799668,
                "file_path": "results/Huggy/Huggy/Huggy-1799668.onnx",
                "reward": 4.372635872467704,
                "creation_time": 1692756120.8233476,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799668.pt"
                ]
            },
            {
                "steps": 1999958,
                "file_path": "results/Huggy/Huggy/Huggy-1999958.onnx",
                "reward": 3.484016515113212,
                "creation_time": 1692756408.4484606,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999958.pt"
                ]
            },
            {
                "steps": 2000034,
                "file_path": "results/Huggy/Huggy/Huggy-2000034.onnx",
                "reward": 3.4869110440247812,
                "creation_time": 1692756408.5809546,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000034.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000034,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.4869110440247812,
            "creation_time": 1692756408.5809546,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000034.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
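
For reference, a minimal Python sketch of how this file can be inspected. It assumes the JSON above is saved locally as run_logs/training_status.json (the local path and the print formatting are assumptions, not part of the file). It walks the checkpoint list that ML-Agents records here and reports the checkpoint with the highest mean reward.

import json

# Assumed local path; adjust to wherever this file lives in your checkout.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each entry pairs a step count with the .onnx (and .pt) files written
# at that point in training, plus the mean reward at save time.
checkpoints = status["Huggy"]["checkpoints"]
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>8} steps  reward={ckpt['reward']:.3f}  {ckpt['file_path']}")

# The checkpoint with the highest recorded mean reward in this run
# (here the 1799668-step checkpoint, reward ~4.37).
best = max(checkpoints, key=lambda c: c["reward"])
print("best:", best["steps"], best["file_path"])

Note that the key "auxillary_file_paths" is spelled exactly as ML-Agents writes it, so any code reading those paths must use that spelling verbatim.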