rlv2unit1b_ppo-Huggy / run_logs /training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199856,
                "file_path": "results/Huggy/Huggy/Huggy-199856.onnx",
                "reward": 3.4377814147431973,
                "creation_time": 1679074265.661918,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199856.pt"
                ]
            },
            {
                "steps": 399992,
                "file_path": "results/Huggy/Huggy/Huggy-399992.onnx",
                "reward": 3.500909498040105,
                "creation_time": 1679074500.6516929,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399992.pt"
                ]
            },
            {
                "steps": 599955,
                "file_path": "results/Huggy/Huggy/Huggy-599955.onnx",
                "reward": 3.9237097112032084,
                "creation_time": 1679074741.989576,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599955.pt"
                ]
            },
            {
                "steps": 799987,
                "file_path": "results/Huggy/Huggy/Huggy-799987.onnx",
                "reward": 3.8385373587065406,
                "creation_time": 1679074979.4652178,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799987.pt"
                ]
            },
            {
                "steps": 999991,
                "file_path": "results/Huggy/Huggy/Huggy-999991.onnx",
                "reward": 4.387690619462067,
                "creation_time": 1679075221.5467772,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999991.pt"
                ]
            },
            {
                "steps": 1199868,
                "file_path": "results/Huggy/Huggy/Huggy-1199868.onnx",
                "reward": 4.0737296952141655,
                "creation_time": 1679075464.305183,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199868.pt"
                ]
            },
            {
                "steps": 1399991,
                "file_path": "results/Huggy/Huggy/Huggy-1399991.onnx",
                "reward": 3.320854040292593,
                "creation_time": 1679075705.5975075,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399991.pt"
                ]
            },
            {
                "steps": 1599962,
                "file_path": "results/Huggy/Huggy/Huggy-1599962.onnx",
                "reward": 3.770990135291448,
                "creation_time": 1679075939.864769,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599962.pt"
                ]
            },
            {
                "steps": 1799971,
                "file_path": "results/Huggy/Huggy/Huggy-1799971.onnx",
                "reward": 3.89742534648715,
                "creation_time": 1679076174.8621516,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799971.pt"
                ]
            },
            {
                "steps": 1999901,
                "file_path": "results/Huggy/Huggy/Huggy-1999901.onnx",
                "reward": 3.8414260497633017,
                "creation_time": 1679076412.3825796,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999901.pt"
                ]
            },
            {
                "steps": 2000096,
                "file_path": "results/Huggy/Huggy/Huggy-2000096.onnx",
                "reward": 3.9106133645231074,
                "creation_time": 1679076412.6094215,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000096.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000096,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.9106133645231074,
            "creation_time": 1679076412.6094215,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000096.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
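
This log lists one entry per ML-Agents checkpoint (training step count, the exported ONNX policy, the mean cumulative reward at save time, and the matching `.pt` file), plus the final exported checkpoint and version metadata. As a minimal sketch of how the file can be inspected (the relative path is an assumption; adjust it to wherever the repository is checked out), the following Python snippet prints the reward curve recorded above:

```python
import json

# Assumed path to the log shown above; change if the file lives elsewhere.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# One line per saved checkpoint: step count, mean reward, exported ONNX path.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>9,d} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final checkpoint is the policy exported as results/Huggy/Huggy.onnx.
final = huggy["final_checkpoint"]
print(f'final: {final["steps"]:,d} steps, reward={final["reward"]:.3f}, file={final["file_path"]}')
```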