{
"Huggy": {
"checkpoints": [
{
"steps": 199808,
"file_path": "results/Huggy/Huggy/Huggy-199808.onnx",
"reward": 3.357644829593721,
"creation_time": 1689237384.0643585,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199808.pt"
]
},
{
"steps": 399965,
"file_path": "results/Huggy/Huggy/Huggy-399965.onnx",
"reward": 3.3518445708534936,
"creation_time": 1689237622.328398,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399965.pt"
]
},
{
"steps": 599917,
"file_path": "results/Huggy/Huggy/Huggy-599917.onnx",
"reward": 3.5654510302203044,
"creation_time": 1689237864.0795555,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599917.pt"
]
},
{
"steps": 799989,
"file_path": "results/Huggy/Huggy/Huggy-799989.onnx",
"reward": 3.839258238420648,
"creation_time": 1689238101.5042014,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799989.pt"
]
},
{
"steps": 999990,
"file_path": "results/Huggy/Huggy/Huggy-999990.onnx",
"reward": 3.5285701155662537,
"creation_time": 1689238341.8566735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999990.pt"
]
},
{
"steps": 1199945,
"file_path": "results/Huggy/Huggy/Huggy-1199945.onnx",
"reward": 3.7147426563832493,
"creation_time": 1689238587.6245615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199945.pt"
]
},
{
"steps": 1399957,
"file_path": "results/Huggy/Huggy/Huggy-1399957.onnx",
"reward": 3.8414404141016245,
"creation_time": 1689238823.9144192,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399957.pt"
]
},
{
"steps": 1599925,
"file_path": "results/Huggy/Huggy/Huggy-1599925.onnx",
"reward": 4.0348625224866685,
"creation_time": 1689239066.4504163,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599925.pt"
]
},
{
"steps": 1799981,
"file_path": "results/Huggy/Huggy/Huggy-1799981.onnx",
"reward": 3.8780794035304678,
"creation_time": 1689239308.8871295,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799981.pt"
]
},
{
"steps": 1999984,
"file_path": "results/Huggy/Huggy/Huggy-1999984.onnx",
"reward": 3.7389710135757923,
"creation_time": 1689239552.1597924,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999984.pt"
]
},
{
"steps": 2000075,
"file_path": "results/Huggy/Huggy/Huggy-2000075.onnx",
"reward": 3.7472664406805327,
"creation_time": 1689239552.2921605,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000075.pt"
]
}
],
"final_checkpoint": {
"steps": 2000075,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7472664406805327,
"creation_time": 1689239552.2921605,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000075.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}