ppo-Huggy / run_logs / training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199864,
        "file_path": "results/Huggy/Huggy/Huggy-199864.onnx",
        "reward": 2.9523679008086523,
        "creation_time": 1690354909.492126,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199864.pt"
        ]
      },
      {
        "steps": 399856,
        "file_path": "results/Huggy/Huggy/Huggy-399856.onnx",
        "reward": 3.4305664319315072,
        "creation_time": 1690355148.1719422,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399856.pt"
        ]
      },
      {
        "steps": 599982,
        "file_path": "results/Huggy/Huggy/Huggy-599982.onnx",
        "reward": 3.6036401623004193,
        "creation_time": 1690355390.52014,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599982.pt"
        ]
      },
      {
        "steps": 799909,
        "file_path": "results/Huggy/Huggy/Huggy-799909.onnx",
        "reward": 3.947484433003094,
        "creation_time": 1690355635.6478488,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799909.pt"
        ]
      },
      {
        "steps": 999956,
        "file_path": "results/Huggy/Huggy/Huggy-999956.onnx",
        "reward": 3.8701184779813844,
        "creation_time": 1690355882.4736001,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999956.pt"
        ]
      },
      {
        "steps": 1199964,
        "file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
        "reward": 3.7281321772309237,
        "creation_time": 1690356128.4857957,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199964.pt"
        ]
      },
      {
        "steps": 1399971,
        "file_path": "results/Huggy/Huggy/Huggy-1399971.onnx",
        "reward": 3.9382699237150303,
        "creation_time": 1690356374.7954793,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399971.pt"
        ]
      },
      {
        "steps": 1599925,
        "file_path": "results/Huggy/Huggy/Huggy-1599925.onnx",
        "reward": 3.771826890764068,
        "creation_time": 1690356616.8857822,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599925.pt"
        ]
      },
      {
        "steps": 1799898,
        "file_path": "results/Huggy/Huggy/Huggy-1799898.onnx",
        "reward": 3.7551126385057296,
        "creation_time": 1690356866.435258,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799898.pt"
        ]
      },
      {
        "steps": 1999458,
        "file_path": "results/Huggy/Huggy/Huggy-1999458.onnx",
        "reward": 3.838476048949549,
        "creation_time": 1690357116.800563,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999458.pt"
        ]
      },
      {
        "steps": 2000208,
        "file_path": "results/Huggy/Huggy/Huggy-2000208.onnx",
        "reward": 3.8133164977790504,
        "creation_time": 1690357116.9513822,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000208.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000208,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.8133164977790504,
      "creation_time": 1690357116.9513822,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000208.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}
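
A minimal sketch of how one might inspect this status file, assuming it is available locally at run_logs/training_status.json (the path is an assumption; the keys match the JSON above, including ML-Agents' "auxillary_file_paths" spelling):

```python
# Sketch: load the ML-Agents training_status.json above and summarize
# the recorded checkpoints. The file path is an assumed local location.
import json

with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each checkpoint entry records the step count, mean reward at save time,
# the exported .onnx policy, and the Unix creation timestamp.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'step {ckpt["steps"]:>8}: reward {ckpt["reward"]:.3f} -> {ckpt["file_path"]}')

# The final checkpoint points at the exported model used for inference.
final = status["Huggy"]["final_checkpoint"]
print(f'final model: {final["file_path"]} (mean reward {final["reward"]:.3f})')
```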