Commit: Llama 3 EN results analyzed
Files changed:
- competition/00d_Llama3_Results.ipynb +0 -0
- competition/{11_Llama-3_8b_p1_analysis.ipynb → 11a_Llama-3_8b_p1_analysis.ipynb} +0 -0
- competition/11b_Llama-3_8b_p1_en_analysis.ipynb +0 -0
- competition/11b_Llama-3_8b_p2_en_analysis.ipynb +0 -0
- competition/{11b_Llama-3_8b_p1_r2_analysis.ipynb → 11c_Llama-3_8b_p1_r2_analysis.ipynb} +0 -0
- llm_toolkit/llm_utils.py +1 -1
- results/mgtv-llama3_p1_en_full_metrics.csv +1 -0
- results/mgtv-llama3_p2_en_full_metrics.csv +1 -0
competition/00d_Llama3_Results.ipynb
CHANGED
The diff for this file is too large to render; see the raw diff.
competition/{11_Llama-3_8b_p1_analysis.ipynb → 11a_Llama-3_8b_p1_analysis.ipynb}
RENAMED
File renamed without content changes.
competition/11b_Llama-3_8b_p1_en_analysis.ipynb
CHANGED
The diff for this file is too large to render; see the raw diff.
competition/11b_Llama-3_8b_p2_en_analysis.ipynb
CHANGED
The diff for this file is too large to render; see the raw diff.
competition/{11b_Llama-3_8b_p1_r2_analysis.ipynb → 11c_Llama-3_8b_p1_r2_analysis.ipynb}
RENAMED
File renamed without content changes.
llm_toolkit/llm_utils.py
CHANGED
@@ -30,7 +30,7 @@ def load_model(
 ):
     print(f"loading model: {model_name} with adapter: {adapter_name_or_path}")

-    if
+    if using_llama_factory:
         from llamafactory.chat import ChatModel

         template = get_template(model_name)
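The one-line fix gates the LLaMA-Factory import behind a new using_llama_factory flag. Below is a minimal sketch of how this branch of load_model could be structured, assuming LLaMA-Factory's documented ChatModel(args) interface; the argument dict, the finetuning_type value, and the get_template stub are assumptions, not the repository's actual code:

# Sketch only: a plausible shape for the using_llama_factory branch.
# Only the flag, the import, and get_template() appear in the diff above;
# everything else is an assumption.

def get_template(model_name):
    # Assumed helper: map a model name to a LLaMA-Factory chat template.
    return "llama3" if "Llama-3" in model_name else "default"

def load_model(
    model_name,
    adapter_name_or_path=None,
    using_llama_factory=False,  # flag introduced by this commit
):
    print(f"loading model: {model_name} with adapter: {adapter_name_or_path}")

    if using_llama_factory:
        from llamafactory.chat import ChatModel

        template = get_template(model_name)
        # ChatModel takes a plain dict of model/inference arguments.
        return ChatModel(dict(
            model_name_or_path=model_name,
            adapter_name_or_path=adapter_name_or_path,
            template=template,
            finetuning_type="lora",  # assumption: the adapters are LoRA checkpoints
        ))
    raise NotImplementedError("the non-LLaMA-Factory path is not shown in this diff")

Gating the import this way means llamafactory only needs to be installed when the flag is actually set.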
results/mgtv-llama3_p1_en_full_metrics.csv
CHANGED
@@ -1,4 +1,5 @@
 epoch,model,accuracy,precision,recall,f1
+0.0,meta-llama/Meta-Llama-3-8B-Instruct_torch.bfloat16_lf,0.13333333333333333,0.5430486329272943,0.13333333333333333,0.17807889451865855
 0.3333333333333333,meta-llama/Meta-Llama-3-8B-Instruct/checkpoint-117_torch.bfloat16_lf,0.6486666666666666,0.6525934632970077,0.6486666666666666,0.6312721163517108
 0.6666666666666666,meta-llama/Meta-Llama-3-8B-Instruct/checkpoint-234_torch.bfloat16_lf,0.561,0.6897096276142071,0.561,0.6083393704375663
 1.0,meta-llama/Meta-Llama-3-8B-Instruct/checkpoint-351_torch.bfloat16_lf,0.621,0.686842945161901,0.621,0.6417441253605001
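The added row benchmarks the untuned base model (epoch 0.0, model path without a checkpoint suffix), which makes the fine-tuning gain visible: accuracy jumps from 0.133 to 0.649 after a third of an epoch. A sketch of how one such row could be produced with scikit-learn follows; average="weighted" is an assumption, though it matches the fact that recall equals accuracy in every row of this file:

# Sketch: producing one row of *_full_metrics.csv with scikit-learn.
# average="weighted" is an assumption; it is consistent with recall
# always equaling accuracy in the rows above.
from sklearn.metrics import accuracy_score, precision_recall_fscore_support

def metrics_row(epoch, model_id, y_true, y_pred):
    accuracy = accuracy_score(y_true, y_pred)
    precision, recall, f1, _ = precision_recall_fscore_support(
        y_true, y_pred, average="weighted", zero_division=0
    )
    return f"{epoch},{model_id},{accuracy},{precision},{recall},{f1}"

# Toy example (hypothetical labels, not the competition data):
print(metrics_row(
    0.0, "meta-llama/Meta-Llama-3-8B-Instruct_torch.bfloat16_lf",
    ["yes", "no", "yes"], ["yes", "yes", "yes"],
))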
results/mgtv-llama3_p2_en_full_metrics.csv
CHANGED
@@ -1,4 +1,5 @@
 epoch,model,accuracy,precision,recall,f1
+0.0,meta-llama/Meta-Llama-3-8B-Instruct_torch.bfloat16_lf,0.063,0.4243897916751269,0.063,0.07661645283033172
 0.3333333333333333,meta-llama/Meta-Llama-3-8B-Instruct/checkpoint-117_torch.bfloat16_lf,0.6203333333333333,0.663582082981778,0.6203333333333333,0.6363626392286635
 0.6666666666666666,meta-llama/Meta-Llama-3-8B-Instruct/checkpoint-234_torch.bfloat16_lf,0.5613333333333334,0.7000506187405509,0.5613333333333334,0.6113039056178092
 1.0,meta-llama/Meta-Llama-3-8B-Instruct/checkpoint-351_torch.bfloat16_lf,0.6203333333333333,0.6819200833733873,0.6203333333333333,0.6405153767205392
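With both prompt variants now carrying a zero-shot baseline plus three checkpoints, a quick way to compare them is to rank each file by F1. A pandas sketch; the ranking logic is illustrative and not part of the commit:

# Sketch: ranking checkpoints by F1 across both prompt variants.
import pandas as pd

for path in (
    "results/mgtv-llama3_p1_en_full_metrics.csv",
    "results/mgtv-llama3_p2_en_full_metrics.csv",
):
    df = pd.read_csv(path)
    best = df.loc[df["f1"].idxmax()]  # row with the highest F1
    print(f"{path}: best f1={best['f1']:.4f} at epoch {best['epoch']} ({best['model']})")

On the numbers above, the final checkpoint (epoch 1.0) yields the best F1 for both prompts: 0.6417 for p1 and 0.6405 for p2.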