{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.4.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "gemma",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.223-211.872.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.0.dev0",
"transformers_commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
"accelerate_version": "0.34.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.22.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.12.1.dev0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 1319.800832,
"max_global_vram": 6768.033792,
"max_process_vram": 0.0,
"max_reserved": 6138.363904,
"max_allocated": 6060.931072
},
"latency": {
"unit": "s",
"count": 1,
"total": 11.7128583984375,
"mean": 11.7128583984375,
"stdev": 0.0,
"p50": 11.7128583984375,
"p90": 11.7128583984375,
"p95": 11.7128583984375,
"p99": 11.7128583984375,
"values": [
11.7128583984375
]
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1775.181824,
"max_global_vram": 6789.005312,
"max_process_vram": 0.0,
"max_reserved": 6142.558208,
"max_allocated": 5028.450816
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04284735870361328,
"mean": 0.02142367935180664,
"stdev": 0.00012409591674804668,
"p50": 0.02142367935180664,
"p90": 0.02152295608520508,
"p95": 0.021535365676879883,
"p99": 0.021545293350219725,
"values": [
0.021299583435058594,
0.021547775268554687
]
},
"throughput": {
"unit": "tokens/s",
"value": 326.7412606887106
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1799.63904,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5031.820288
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.143392578125,
"mean": 2.5716962890625,
"stdev": 0.015890869140624986,
"p50": 2.5716962890625,
"p90": 2.584408984375,
"p95": 2.5859980712890622,
"p99": 2.5872693408203125,
"values": [
2.555805419921875,
2.587587158203125
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.3837474277716
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 254,
"total": 5.1431331844329815,
"mean": 0.020248555844224345,
"stdev": 0.00040859367375037484,
"p50": 0.020331520080566406,
"p90": 0.020648550987243653,
"p95": 0.020862720012664794,
"p99": 0.021247232265472413,
"values": [
0.019732479095458985,
0.019717119216918946,
0.019677183151245118,
0.019687423706054686,
0.01965977668762207,
0.01964646339416504,
0.019693567276000978,
0.01965670394897461,
0.01963827133178711,
0.01964339256286621,
0.01967616081237793,
0.01963929557800293,
0.01968230438232422,
0.01962188720703125,
0.01963520050048828,
0.01967411231994629,
0.019708927154541016,
0.020496383666992187,
0.019717119216918946,
0.02020351982116699,
0.020763647079467772,
0.02049945640563965,
0.01979903984069824,
0.019709951400756837,
0.019785728454589844,
0.01982975959777832,
0.01966592025756836,
0.01967001533508301,
0.019681280136108398,
0.019733503341674806,
0.01965772819519043,
0.01964134407043457,
0.01966796875,
0.019644416809082032,
0.01964339256286621,
0.019629056930541993,
0.02086092758178711,
0.020535295486450195,
0.020900863647460938,
0.020788223266601562,
0.020516864776611327,
0.02047488021850586,
0.020535295486450195,
0.020493312835693358,
0.020544511795043945,
0.020533248901367186,
0.02049945640563965,
0.01968332862854004,
0.01966182327270508,
0.01969049644470215,
0.019725311279296876,
0.01969049644470215,
0.019742719650268553,
0.01966080093383789,
0.0196997127532959,
0.01967411231994629,
0.01966182327270508,
0.01963724708557129,
0.019693567276000978,
0.01967820739746094,
0.01967820739746094,
0.01965363121032715,
0.01965158462524414,
0.019672063827514647,
0.019672063827514647,
0.019679231643676756,
0.01966592025756836,
0.01967103958129883,
0.01966182327270508,
0.01965875244140625,
0.019709951400756837,
0.019713024139404296,
0.019886079788208007,
0.020335615158081053,
0.019826688766479493,
0.02086604881286621,
0.020586496353149415,
0.01971609687805176,
0.019679231643676756,
0.021163007736206055,
0.020996095657348633,
0.020564992904663085,
0.02050048065185547,
0.020552703857421875,
0.02071347236633301,
0.020770816802978515,
0.0204769287109375,
0.020496383666992187,
0.020642816543579103,
0.02088960075378418,
0.02082508850097656,
0.020574207305908202,
0.02045747184753418,
0.020447231292724608,
0.020806655883789063,
0.020692991256713866,
0.019755008697509766,
0.01969049644470215,
0.02030899238586426,
0.020521984100341797,
0.020566015243530272,
0.020571136474609376,
0.020541439056396483,
0.020517887115478514,
0.020530176162719727,
0.020486143112182616,
0.020544511795043945,
0.019664896011352538,
0.019747840881347657,
0.020509695053100584,
0.02031718444824219,
0.02045952033996582,
0.020478975296020507,
0.020526079177856444,
0.020529151916503906,
0.020496383666992187,
0.020528127670288086,
0.02051584053039551,
0.020398080825805662,
0.020574207305908202,
0.020553728103637696,
0.020921344757080077,
0.021551103591918946,
0.021342208862304687,
0.02010419273376465,
0.019784704208374023,
0.01967513656616211,
0.019701759338378907,
0.019947519302368166,
0.02065100860595703,
0.020609024047851563,
0.01969254493713379,
0.019681280136108398,
0.01965260887145996,
0.019748863220214845,
0.019708927154541016,
0.02091110420227051,
0.02066329574584961,
0.02039910316467285,
0.020315135955810547,
0.020295679092407228,
0.020273151397705077,
0.020395008087158203,
0.02036735916137695,
0.02025369644165039,
0.0202926082611084,
0.020297727584838866,
0.020505599975585938,
0.020379648208618165,
0.020320255279541014,
0.020402175903320312,
0.020319232940673827,
0.02026905632019043,
0.020280319213867186,
0.020287488937377928,
0.02027008056640625,
0.020287488937377928,
0.020503551483154296,
0.020412416458129884,
0.020324352264404297,
0.020322303771972656,
0.020316160202026368,
0.020319232940673827,
0.020320255279541014,
0.020304895401000975,
0.020338687896728515,
0.020289535522460937,
0.020321279525756835,
0.020311040878295897,
0.020339712142944336,
0.020369407653808593,
0.020337663650512695,
0.020445184707641603,
0.021155839920043946,
0.020722688674926756,
0.02030284881591797,
0.020298751831054687,
0.020396032333374024,
0.02045132827758789,
0.020356096267700196,
0.02028339195251465,
0.02027724838256836,
0.020314111709594726,
0.020298751831054687,
0.020307968139648438,
0.020319232940673827,
0.020327423095703127,
0.02027622413635254,
0.02028441619873047,
0.02042572784423828,
0.02031001663208008,
0.02040934371948242,
0.020412416458129884,
0.020364288330078126,
0.020320255279541014,
0.02030284881591797,
0.020587520599365236,
0.02045132827758789,
0.020427776336669923,
0.020379648208618165,
0.020332544326782227,
0.020314111709594726,
0.020355072021484375,
0.020312063217163084,
0.020306943893432617,
0.02027622413635254,
0.020550655364990233,
0.02059878349304199,
0.02187571144104004,
0.02067046356201172,
0.02047590446472168,
0.02050048065185547,
0.020331520080566406,
0.020389888763427736,
0.020348928451538087,
0.02039193534851074,
0.020349952697753908,
0.020341760635375978,
0.020361215591430663,
0.020362239837646484,
0.020338687896728515,
0.020382720947265624,
0.02045132827758789,
0.020554752349853517,
0.021128192901611328,
0.02085068893432617,
0.020324352264404297,
0.020363264083862305,
0.020329471588134765,
0.020353023529052734,
0.02030080032348633,
0.02027212715148926,
0.020361215591430663,
0.020353023529052734,
0.02089779281616211,
0.020551679611206054,
0.02046463966369629,
0.020311040878295897,
0.020331520080566406,
0.020371456146240235,
0.020282367706298828,
0.020378623962402344,
0.0204400634765625,
0.020307968139648438,
0.020410367965698242,
0.02039910316467285,
0.020356096267700196,
0.020402175903320312,
0.020347904205322266,
0.020344831466674804,
0.02046668815612793,
0.020447231292724608,
0.02028339195251465,
0.020331520080566406
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.38623809486334
},
"energy": null,
"efficiency": null
}
}
}