poca-SoccerTwos / run_logs / timers.json
Second Push (cd389d3)
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.1379538774490356,
"min": 0.9347377419471741,
"max": 1.2154386043548584,
"count": 5000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 22759.078125,
"min": 16144.0087890625,
"max": 28788.599609375,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 77.0,
"min": 47.0,
"max": 109.45652173913044,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 20328.0,
"min": 17524.0,
"max": 22004.0,
"count": 5000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1796.1838299057483,
"min": 1766.9128790251336,
"max": 2009.782144467024,
"count": 5000
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 237096.26554755878,
"min": 162464.24892216737,
"max": 408058.2623436854,
"count": 5000
},
"SoccerTwos.Step.mean": {
"value": 199999836.0,
"min": 150009962.0,
"max": 199999836.0,
"count": 5000
},
"SoccerTwos.Step.sum": {
"value": 199999836.0,
"min": 150009962.0,
"max": 199999836.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.006698582321405411,
"min": -0.13151130080223083,
"max": 0.11459764093160629,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 0.884212851524353,
"min": -20.03160285949707,
"max": 15.929072380065918,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.008320164866745472,
"min": -0.13271933794021606,
"max": 0.1126069650053978,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 1.0982617139816284,
"min": -19.85004234313965,
"max": 15.652368545532227,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.09068181839856235,
"min": -0.4092910343203051,
"max": 0.3747726631679123,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -11.97000002861023,
"min": -65.99200028181076,
"max": 55.140800297260284,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.09068181839856235,
"min": -0.4092910343203051,
"max": 0.3747726631679123,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -11.97000002861023,
"min": -65.99200028181076,
"max": 55.140800297260284,
"count": 5000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01593884900600339,
"min": 0.01253225206813416,
"max": 0.026699458491930273,
"count": 2426
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01593884900600339,
"min": 0.01253225206813416,
"max": 0.026699458491930273,
"count": 2426
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0718688689172268,
"min": 0.05997977815568447,
"max": 0.11612731764713922,
"count": 2426
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0718688689172268,
"min": 0.05997977815568447,
"max": 0.11612731764713922,
"count": 2426
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.07221189687649408,
"min": 0.06059066231052081,
"max": 0.11714418778816858,
"count": 2426
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.07221189687649408,
"min": 0.06059066231052081,
"max": 0.11714418778816858,
"count": 2426
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2426
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2426
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2426
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2426
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2426
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2426
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1688012068",
"python_version": "3.9.16 | packaged by conda-forge | (main, Feb 1 2023, 21:39:03) \n[GCC 11.3.0]",
"command_line_arguments": "/home/yoav/anaconda3/envs/2vs2_soccer/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1688083976"
},
"total": 71907.872196146,
"count": 1,
"self": 0.21947479801019654,
"children": {
"run_training.setup": {
"total": 0.008186355989892036,
"count": 1,
"self": 0.008186355989892036
},
"TrainerController.start_learning": {
"total": 71907.644534992,
"count": 1,
"self": 71.42090288587497,
"children": {
"TrainerController._reset_env": {
"total": 7.242712540260982,
"count": 251,
"self": 7.242712540260982
},
"TrainerController.advance": {
"total": 71828.76494877986,
"count": 3422862,
"self": 66.44977636545082,
"children": {
"env_step": {
"total": 51541.429678388406,
"count": 3422862,
"self": 37821.549245519214,
"children": {
"SubprocessEnvManager._take_step": {
"total": 13680.694317420828,
"count": 3422862,
"self": 372.45103261907934,
"children": {
"TorchPolicy.evaluate": {
"total": 13308.243284801749,
"count": 6278128,
"self": 13308.243284801749
}
}
},
"workers": {
"total": 39.186115448363125,
"count": 3422862,
"self": 0.0,
"children": {
"worker_root": {
"total": 71820.2092914654,
"count": 3422862,
"is_parallel": true,
"self": 41044.5662780235,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017572160286363214,
"count": 2,
"is_parallel": true,
"self": 0.00041944000986404717,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013377760187722743,
"count": 8,
"is_parallel": true,
"self": 0.0013377760187722743
}
}
},
"UnityEnvironment.step": {
"total": 0.024257199984276667,
"count": 1,
"is_parallel": true,
"self": 0.0006558480090461671,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00032233400270342827,
"count": 1,
"is_parallel": true,
"self": 0.00032233400270342827
},
"communicator.exchange": {
"total": 0.02124975499464199,
"count": 1,
"is_parallel": true,
"self": 0.02124975499464199
},
"steps_from_proto": {
"total": 0.0020292629778850824,
"count": 2,
"is_parallel": true,
"self": 0.0004114069161005318,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0016178560617845505,
"count": 8,
"is_parallel": true,
"self": 0.0016178560617845505
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.4603567080339417,
"count": 500,
"is_parallel": true,
"self": 0.09795306270825677,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.3624036453256849,
"count": 2000,
"is_parallel": true,
"self": 0.3624036453256849
}
}
},
"UnityEnvironment.step": {
"total": 30775.182656733872,
"count": 3422861,
"is_parallel": true,
"self": 1669.3998983051279,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 1166.5835989805928,
"count": 3422861,
"is_parallel": true,
"self": 1166.5835989805928
},
"communicator.exchange": {
"total": 22646.438464231906,
"count": 3422861,
"is_parallel": true,
"self": 22646.438464231906
},
"steps_from_proto": {
"total": 5292.760695216246,
"count": 6845722,
"is_parallel": true,
"self": 1138.2786801359034,
"children": {
"_process_rank_one_or_two_observation": {
"total": 4154.4820150803425,
"count": 27382888,
"is_parallel": true,
"self": 4154.4820150803425
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 20220.885494026006,
"count": 3422862,
"self": 427.4996939775883,
"children": {
"process_trajectory": {
"total": 7681.809909527277,
"count": 3422862,
"self": 7659.8104141058575,
"children": {
"RLTrainer._checkpoint": {
"total": 21.99949542141985,
"count": 100,
"self": 21.99949542141985
}
}
},
"_update_policy": {
"total": 12111.57589052114,
"count": 2426,
"self": 7725.115777124796,
"children": {
"TorchPOCAOptimizer.update": {
"total": 4386.460113396344,
"count": 72780,
"self": 4386.460113396344
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.167994923889637e-06,
"count": 1,
"self": 1.167994923889637e-06
},
"TrainerController._save_models": {
"total": 0.21596961800241843,
"count": 1,
"self": 0.0015275460318662226,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2144420719705522,
"count": 1,
"self": 0.2144420719705522
}
}
}
}
}
}
}
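
For reference, a minimal Python sketch (not part of this repository) for inspecting the timer tree above. It assumes the file is read from run_logs/timers.json, walks the nested "total" / "self" / "count" / "children" records, prints the slowest blocks, and reads one of the gauges recorded alongside the tree. The helper name walk and the 60-second threshold are illustrative choices, not ML-Agents API.

import json

def walk(node, name="root", depth=0, min_seconds=60.0):
    """Recursively print timer blocks whose cumulative time is at least min_seconds."""
    total = node.get("total", 0.0)
    if total >= min_seconds:
        print(f"{'  ' * depth}{name}: {total:,.1f}s "
              f"(self {node.get('self', 0.0):,.1f}s, count {node.get('count', 0)})")
    # Recurse into child timer blocks regardless of whether the parent was printed.
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1, min_seconds)

with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Summarize where wall-clock time went (e.g. env_step vs trainer_advance).
walk(timers)

# Gauges such as SoccerTwos.Self-play.ELO.mean sit next to the timer tree.
elo = timers["gauges"]["SoccerTwos.Self-play.ELO.mean"]
print(f"Mean ELO at last report: {elo['value']:.1f} "
      f"(min {elo['min']:.1f}, max {elo['max']:.1f})")

With this file, the walk would show the bulk of the 71,907-second run inside TrainerController.advance, split between env_step and trainer_advance as recorded above.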