{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199931,
        "file_path": "results/Huggy/Huggy/Huggy-199931.onnx",
        "reward": 3.310282061334516,
        "creation_time": 1700137201.555207,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199931.pt"
        ]
      },
      {
        "steps": 399952,
        "file_path": "results/Huggy/Huggy/Huggy-399952.onnx",
        "reward": 4.07354390834059,
        "creation_time": 1700137452.3612425,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399952.pt"
        ]
      },
      {
        "steps": 599979,
        "file_path": "results/Huggy/Huggy/Huggy-599979.onnx",
        "reward": 4.207177492288443,
        "creation_time": 1700137701.5628445,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599979.pt"
        ]
      },
      {
        "steps": 799894,
        "file_path": "results/Huggy/Huggy/Huggy-799894.onnx",
        "reward": 3.6572826590440046,
        "creation_time": 1700137948.1709492,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799894.pt"
        ]
      },
      {
        "steps": 999649,
        "file_path": "results/Huggy/Huggy/Huggy-999649.onnx",
        "reward": 3.376716490089893,
        "creation_time": 1700138200.8976467,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999649.pt"
        ]
      },
      {
        "steps": 1199934,
        "file_path": "results/Huggy/Huggy/Huggy-1199934.onnx",
        "reward": 3.641514708255899,
        "creation_time": 1700138455.491709,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199934.pt"
        ]
      },
      {
        "steps": 1399995,
        "file_path": "results/Huggy/Huggy/Huggy-1399995.onnx",
        "reward": 3.682824565234937,
        "creation_time": 1700138711.0924504,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399995.pt"
        ]
      }
    ]
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.0+cu118"
  }
}