{
"Huggy": {
"checkpoints": [
{
"steps": 199744,
"file_path": "results/Huggy/Huggy/Huggy-199744.onnx",
"reward": 3.421137053458417,
"creation_time": 1699273582.645185,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199744.pt"
]
},
{
"steps": 399978,
"file_path": "results/Huggy/Huggy/Huggy-399978.onnx",
"reward": 3.97481669485569,
"creation_time": 1699273816.9030275,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399978.pt"
]
},
{
"steps": 599983,
"file_path": "results/Huggy/Huggy/Huggy-599983.onnx",
"reward": 4.329321575164795,
"creation_time": 1699274054.6848993,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599983.pt"
]
},
{
"steps": 799899,
"file_path": "results/Huggy/Huggy/Huggy-799899.onnx",
"reward": 3.7693884315736153,
"creation_time": 1699274288.3024037,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799899.pt"
]
},
{
"steps": 999972,
"file_path": "results/Huggy/Huggy/Huggy-999972.onnx",
"reward": 3.749069263014877,
"creation_time": 1699274528.4621415,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999972.pt"
]
},
{
"steps": 1199741,
"file_path": "results/Huggy/Huggy/Huggy-1199741.onnx",
"reward": 3.7659693372492886,
"creation_time": 1699274782.6967657,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199741.pt"
]
},
{
"steps": 1399949,
"file_path": "results/Huggy/Huggy/Huggy-1399949.onnx",
"reward": 3.7015650120435977,
"creation_time": 1699275026.2020774,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399949.pt"
]
},
{
"steps": 1599858,
"file_path": "results/Huggy/Huggy/Huggy-1599858.onnx",
"reward": 3.892572901750866,
"creation_time": 1699275281.4540238,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599858.pt"
]
},
{
"steps": 1799903,
"file_path": "results/Huggy/Huggy/Huggy-1799903.onnx",
"reward": 3.598031185441098,
"creation_time": 1699275539.7983558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799903.pt"
]
},
{
"steps": 1999810,
"file_path": "results/Huggy/Huggy/Huggy-1999810.onnx",
"reward": 3.5264355524992332,
"creation_time": 1699275813.9182703,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999810.pt"
]
},
{
"steps": 2000560,
"file_path": "results/Huggy/Huggy/Huggy-2000560.onnx",
"reward": 3.4907757469585965,
"creation_time": 1699275814.1343024,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000560.pt"
]
}
],
"final_checkpoint": {
"steps": 2000560,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.4907757469585965,
"creation_time": 1699275814.1343024,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000560.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}