@@ -67,7 +67,7 @@ def summarize_results(logs_dir, args, start, finish):
     ttfts = []
     tg_lats = []
     for n in range(args.num_processes):
-        results = open(f"{logs_dir}/log_{n}", "r").readlines()[-9].split("|")
+        results = open(f"{logs_dir}/log_{n}", "r").readlines()[5].split("|")
         prompt_size = int(results[1])
         assert prompt_size == args.prompt_size
         tokens_generated = int(results[2])
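
For context on the new index: instead of counting nine lines back from the end of the log, the code now reads the sixth line (index 5), where the first data row of llama-batched-bench's pipe-delimited summary table is expected to sit. Below is a minimal, illustrative sketch of that parsing, assuming the usual table layout (| PP | TG | B | N_KV | ... |); the sample values are made up:

```python
# Illustrative only: parse one data row of the benchmark's summary table.
# Column order assumed: PP, TG, B, N_KV, then timing/throughput columns.
sample_row = "|   512 |    128 |    1 |    640 |    1.234 |   415.12 |    4.321 |    29.62 |    5.555 |   115.21 |"

fields = sample_row.split("|")
# fields[0] is the empty string before the leading "|", so PP (the prompt
# size) lands at index 1 and TG (tokens generated) at index 2, matching
# results[1] and results[2] in the patch above.
prompt_size = int(fields[1])
tokens_generated = int(fields[2])
print(prompt_size, tokens_generated)  # 512 128
```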
@@ -142,11 +142,11 @@ def main():
             if mem_place == "none":
                 cmd = ["numactl", f"--physcpubind={gen_threads_config(args.num_threads, n)}",
                        "/llm/llama-batched-bench", "-m", args.model, "-c", str(args.kv_cache), "-b", "2048", "-ub", "512", "-npp", str(args.prompt_size), "-ntg", str(TOKENS),
-                       "-npl", str(args.batch_size), "-t", str(args.num_threads), "-tb", str(args.num_threads), "-td", str(args.num_threads)]
+                       "-npl", str(args.batch_size), "-t", str(args.num_threads), "-tb", str(args.num_threads)]
             else:
                 cmd = ["numactl", f"--physcpubind={gen_threads_config(args.num_threads, n)}", str(mem_place),
                        "/llm/llama-batched-bench", "-m", args.model, "-c", str(args.kv_cache), "-b", "2048", "-ub", "512", "-npp", str(args.prompt_size), "-ntg", str(TOKENS),
-                       "-npl", str(args.batch_size), "-t", str(args.num_threads), "-tb", str(args.num_threads), "-td", str(args.num_threads)]
+                       "-npl", str(args.batch_size), "-t", str(args.num_threads), "-tb", str(args.num_threads)]

     else:
         print("FAIL: batched-bench not found!")
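
The two branches now differ only in whether an explicit numactl memory-placement argument is inserted, and both end with the same thread flags. As a hedged sketch of how a cmd list like this is typically run so that each process writes the log_{n} file parsed by summarize_results(), assuming a subprocess-based launcher (the helper below is hypothetical and not part of this PR):

```python
import subprocess

def launch_benchmark(cmd, logs_dir, n):
    """Hypothetical launcher: run one benchmark process and capture its
    output into the per-process log that summarize_results() reads."""
    log_path = f"{logs_dir}/log_{n}"
    with open(log_path, "w") as log_file:
        # Redirect both stdout and stderr so the summary table ends up in
        # the log file regardless of which stream the tool writes it to.
        return subprocess.Popen(cmd, stdout=log_file, stderr=subprocess.STDOUT)

# Example usage (cmds built as in the diff above):
# procs = [launch_benchmark(cmd, logs_dir, n) for n, cmd in enumerate(cmds)]
# for p in procs:
#     p.wait()
```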