{ "name": "root", "gauges": { "Pyramids.Policy.Entropy.mean": { "value": 0.39757049083709717, "min": 0.39757049083709717, "max": 1.3595595359802246, "count": 30 }, "Pyramids.Policy.Entropy.sum": { "value": 11920.75390625, "min": 11920.75390625, "max": 41243.59765625, "count": 30 }, "Pyramids.Step.mean": { "value": 899956.0, "min": 29885.0, "max": 899956.0, "count": 30 }, "Pyramids.Step.sum": { "value": 899956.0, "min": 29885.0, "max": 899956.0, "count": 30 }, "Pyramids.Policy.ExtrinsicValueEstimate.mean": { "value": 0.4898543655872345, "min": -0.0930364727973938, "max": 0.5115171074867249, "count": 30 }, "Pyramids.Policy.ExtrinsicValueEstimate.sum": { "value": 132.26068115234375, "min": -22.421789169311523, "max": 139.13265991210938, "count": 30 }, "Pyramids.Policy.RndValueEstimate.mean": { "value": -0.09477725625038147, "min": -0.09477725625038147, "max": 0.4064110219478607, "count": 30 }, "Pyramids.Policy.RndValueEstimate.sum": { "value": -25.589859008789062, "min": -25.589859008789062, "max": 96.31941223144531, "count": 30 }, "Pyramids.Losses.PolicyLoss.mean": { "value": 0.06938424920921939, "min": 0.06483511217628184, "max": 0.07380248141961257, "count": 30 }, "Pyramids.Losses.PolicyLoss.sum": { "value": 1.0407637381382908, "min": 0.5904198513569006, "max": 1.0407637381382908, "count": 30 }, "Pyramids.Losses.ValueLoss.mean": { "value": 0.015227381634112032, "min": 0.0006474719040076074, "max": 0.01643273844057971, "count": 30 }, "Pyramids.Losses.ValueLoss.sum": { "value": 0.22841072451168049, "min": 0.008213004547359109, "max": 0.23005833816811597, "count": 30 }, "Pyramids.Policy.LearningRate.mean": { "value": 4.973965008711112e-06, "min": 4.973965008711112e-06, "max": 0.00029417966860677773, "count": 30 }, "Pyramids.Policy.LearningRate.sum": { "value": 7.460947513066668e-05, "min": 7.460947513066668e-05, "max": 0.003448147817284111, "count": 30 }, "Pyramids.Policy.Epsilon.mean": { "value": 0.10165795555555555, "min": 0.10165795555555555, "max": 0.1980598888888889, "count": 30 }, "Pyramids.Policy.Epsilon.sum": { "value": 1.5248693333333334, "min": 1.4705237777777782, "max": 2.543327111111111, "count": 30 }, "Pyramids.Policy.Beta.mean": { "value": 0.00017562976, "min": 0.00017562976, "max": 0.009806182900000002, "count": 30 }, "Pyramids.Policy.Beta.sum": { "value": 0.0026344464, "min": 0.0026344464, "max": 0.1149533173, "count": 30 }, "Pyramids.Losses.RNDLoss.mean": { "value": 0.013464792631566525, "min": 0.013354185037314892, "max": 0.5027457475662231, "count": 30 }, "Pyramids.Losses.RNDLoss.sum": { "value": 0.2019718885421753, "min": 0.18695859611034393, "max": 4.021965980529785, "count": 30 }, "Pyramids.Environment.EpisodeLength.mean": { "value": 386.56, "min": 353.8780487804878, "max": 999.0, "count": 30 }, "Pyramids.Environment.EpisodeLength.sum": { "value": 28992.0, "min": 17579.0, "max": 33190.0, "count": 30 }, "Pyramids.Environment.CumulativeReward.mean": { "value": 1.560093317826589, "min": -0.9999742455059483, "max": 1.5994320809841156, "count": 30 }, "Pyramids.Environment.CumulativeReward.sum": { "value": 117.00699883699417, "min": -31.995201662182808, "max": 129.55399855971336, "count": 30 }, "Pyramids.Policy.ExtrinsicReward.mean": { "value": 1.560093317826589, "min": -0.9999742455059483, "max": 1.5994320809841156, "count": 30 }, "Pyramids.Policy.ExtrinsicReward.sum": { "value": 117.00699883699417, "min": -31.995201662182808, "max": 129.55399855971336, "count": 30 }, "Pyramids.Policy.RndReward.mean": { "value": 0.05361438301566523, "min": 0.05361438301566523, "max": 
10.323105855948395, "count": 30 }, "Pyramids.Policy.RndReward.sum": { "value": 4.021078726174892, "min": 4.021078726174892, "max": 185.8159054070711, "count": 30 }, "Pyramids.IsTraining.mean": { "value": 1.0, "min": 1.0, "max": 1.0, "count": 30 }, "Pyramids.IsTraining.sum": { "value": 1.0, "min": 1.0, "max": 1.0, "count": 30 } }, "metadata": { "timer_format_version": "0.1.0", "start_time_seconds": "1657738490", "python_version": "3.7.13 (default, Apr 24 2022, 01:04:09) \n[GCC 7.5.0]", "command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./trained-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics", "mlagents_version": "0.29.0.dev0", "mlagents_envs_version": "0.29.0.dev0", "communication_protocol_version": "1.5.0", "pytorch_version": "1.8.1+cu102", "numpy_version": "1.21.6", "end_time_seconds": "1657740373" }, "total": 1882.9600919249997, "count": 1, "self": 0.42833328099959544, "children": { "run_training.setup": { "total": 0.054805991999728576, "count": 1, "self": 0.054805991999728576 }, "TrainerController.start_learning": { "total": 1882.4769526520004, "count": 1, "self": 1.3027695909495378, "children": { "TrainerController._reset_env": { "total": 6.473618197000178, "count": 1, "self": 6.473618197000178 }, "TrainerController.advance": { "total": 1874.6021489080504, "count": 57289, "self": 1.4039689420374089, "children": { "env_step": { "total": 1219.1033666070125, "count": 57289, "self": 1119.6238383419636, "children": { "SubprocessEnvManager._take_step": { "total": 98.8068662120113, "count": 57289, "self": 4.268505632986944, "children": { "TorchPolicy.evaluate": { "total": 94.53836057902436, "count": 56309, "self": 32.348266454005625, "children": { "TorchPolicy.sample_actions": { "total": 62.19009412501873, "count": 56309, "self": 62.19009412501873 } } } } }, "workers": { "total": 0.672662053037584, "count": 57289, "self": 0.0, "children": { "worker_root": { "total": 1878.9325053499733, "count": 57289, "is_parallel": true, "self": 852.3902138780068, "children": { "run_training.setup": { "total": 0.0, "count": 0, "is_parallel": true, "self": 0.0, "children": { "steps_from_proto": { "total": 0.0019260659996689355, "count": 1, "is_parallel": true, "self": 0.0007521909992647124, "children": { "_process_rank_one_or_two_observation": { "total": 0.001173875000404223, "count": 8, "is_parallel": true, "self": 0.001173875000404223 } } }, "UnityEnvironment.step": { "total": 0.04927950499995859, "count": 1, "is_parallel": true, "self": 0.00047984999901018455, "children": { "UnityEnvironment._generate_step_input": { "total": 0.000427511000452796, "count": 1, "is_parallel": true, "self": 0.000427511000452796 }, "communicator.exchange": { "total": 0.04671544400025596, "count": 1, "is_parallel": true, "self": 0.04671544400025596 }, "steps_from_proto": { "total": 0.0016567000002396526, "count": 1, "is_parallel": true, "self": 0.0004194039997855725, "children": { "_process_rank_one_or_two_observation": { "total": 0.00123729600045408, "count": 8, "is_parallel": true, "self": 0.00123729600045408 } } } } } } }, "UnityEnvironment.step": { "total": 1026.5422914719666, "count": 57288, "is_parallel": true, "self": 25.48484664891157, "children": { "UnityEnvironment._generate_step_input": { "total": 21.37142343792675, "count": 57288, "is_parallel": true, "self": 21.37142343792675 }, "communicator.exchange": { "total": 894.4486843141308, "count": 57288, "is_parallel": true, "self": 894.4486843141308 }, "steps_from_proto": { "total": 
85.23733707099746, "count": 57288, "is_parallel": true, "self": 21.078682677283723, "children": { "_process_rank_one_or_two_observation": { "total": 64.15865439371373, "count": 458304, "is_parallel": true, "self": 64.15865439371373 } } } } } } } } } } }, "trainer_advance": { "total": 654.0948133590005, "count": 57289, "self": 2.4162042220768853, "children": { "process_trajectory": { "total": 149.28206904092258, "count": 57289, "self": 149.1674607319228, "children": { "RLTrainer._checkpoint": { "total": 0.11460830899977736, "count": 1, "self": 0.11460830899977736 } } }, "_update_policy": { "total": 502.396540096001, "count": 408, "self": 196.61358089001578, "children": { "TorchPPOOptimizer.update": { "total": 305.7829592059852, "count": 20532, "self": 305.7829592059852 } } } } } } }, "trainer_threads": { "total": 1.1089996405644342e-06, "count": 1, "self": 1.1089996405644342e-06 }, "TrainerController._save_models": { "total": 0.0984148470006403, "count": 1, "self": 0.0017685410002741264, "children": { "RLTrainer._checkpoint": { "total": 0.09664630600036617, "count": 1, "self": 0.09664630600036617 } } } } } } }
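
The dump above is the ML-Agents timer/metrics summary for a PPO + RND training run on the Pyramids environment (30 summary points over roughly 900k steps, launched via the mlagents-learn command recorded in the metadata). As a minimal sketch of how one might inspect the "gauges" block programmatically, the snippet below loads the JSON from a local file and prints each gauge's last value alongside its min/max; the path "run_logs/timers.json" is an assumption about where this dump was saved, not something stated in the data itself.

    # Sketch: read the gauge metrics from the JSON dump above.
    # Assumption: the dump has been saved locally as "run_logs/timers.json".
    import json

    with open("run_logs/timers.json") as f:
        stats = json.load(f)

    # Each gauge holds the most recent value plus min/max over `count` summary writes.
    for name, gauge in stats["gauges"].items():
        print(f"{name}: value={gauge['value']:.4f} "
              f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, count={gauge['count']})")

For example, this would report Pyramids.Environment.CumulativeReward.mean rising to about 1.56 by the final summary, matching the values recorded in the dump.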