Upload folder using huggingface_hub
- checkpoint-760/config.json +188 -0
- checkpoint-760/model.safetensors +3 -0
- checkpoint-760/optimizer.pt +3 -0
- checkpoint-760/preprocessor_config.json +36 -0
- checkpoint-760/rng_state.pth +3 -0
- checkpoint-760/scheduler.pt +3 -0
- checkpoint-760/trainer_state.json +220 -0
- checkpoint-760/training_args.bin +3 -0
- config.json +1 -1
- model.safetensors +1 -1
- training_args.bin +1 -1
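
The commit title indicates these files were pushed with the huggingface_hub client. Below is a minimal sketch of the kind of call that produces such a commit; the local folder path and target repo id are illustrative assumptions, not values taken from this commit.

# Sketch: pushing a training output folder to the Hub with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="indian_food_image_detection",              # local output dir (assumed)
    repo_id="your-username/indian_food_image_detection",    # target model repo (assumed)
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)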
checkpoint-760/config.json
ADDED
@@ -0,0 +1,188 @@
+{
+  "_name_or_path": "dima806/indian_food_image_detection",
+  "architectures": [
+    "ViTForImageClassification"
+  ],
+  "attention_probs_dropout_prob": 0.0,
+  "encoder_stride": 16,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.0,
+  "hidden_size": 768,
+  "id2label": {
+    "0": "adhirasam",
+    "1": "aloo_gobi",
+    "2": "aloo_matar",
+    "3": "aloo_methi",
+    "4": "aloo_shimla_mirch",
+    "5": "aloo_tikki",
+    "6": "anarsa",
+    "7": "ariselu",
+    "8": "bandar_laddu",
+    "9": "basundi",
+    "10": "bhatura",
+    "11": "bhindi_masala",
+    "12": "biryani",
+    "13": "boondi",
+    "14": "butter_chicken",
+    "15": "chak_hao_kheer",
+    "16": "cham_cham",
+    "17": "chana_masala",
+    "18": "chapati",
+    "19": "chhena_kheeri",
+    "20": "chicken_razala",
+    "21": "chicken_tikka",
+    "22": "chicken_tikka_masala",
+    "23": "chikki",
+    "24": "daal_baati_churma",
+    "25": "daal_puri",
+    "26": "dal_makhani",
+    "27": "dal_tadka",
+    "28": "dharwad_pedha",
+    "29": "doodhpak",
+    "30": "double_ka_meetha",
+    "31": "dum_aloo",
+    "32": "gajar_ka_halwa",
+    "33": "gavvalu",
+    "34": "ghevar",
+    "35": "gulab_jamun",
+    "36": "imarti",
+    "37": "jalebi",
+    "38": "kachori",
+    "39": "kadai_paneer",
+    "40": "kadhi_pakoda",
+    "41": "kajjikaya",
+    "42": "kakinada_khaja",
+    "43": "kalakand",
+    "44": "karela_bharta",
+    "45": "kofta",
+    "46": "kuzhi_paniyaram",
+    "47": "lassi",
+    "48": "ledikeni",
+    "49": "litti_chokha",
+    "50": "lyangcha",
+    "51": "maach_jhol",
+    "52": "makki_di_roti_sarson_da_saag",
+    "53": "malapua",
+    "54": "misi_roti",
+    "55": "misti_doi",
+    "56": "modak",
+    "57": "mysore_pak",
+    "58": "naan",
+    "59": "navrattan_korma",
+    "60": "palak_paneer",
+    "61": "paneer_butter_masala",
+    "62": "phirni",
+    "63": "pithe",
+    "64": "poha",
+    "65": "poornalu",
+    "66": "pootharekulu",
+    "67": "qubani_ka_meetha",
+    "68": "rabri",
+    "69": "ras_malai",
+    "70": "rasgulla",
+    "71": "sandesh",
+    "72": "shankarpali",
+    "73": "sheer_korma",
+    "74": "sheera",
+    "75": "shrikhand",
+    "76": "sohan_halwa",
+    "77": "sohan_papdi",
+    "78": "sutar_feni",
+    "79": "unni_appam"
+  },
+  "image_size": 224,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "label2id": {
+    "adhirasam": 0,
+    "aloo_gobi": 1,
+    "aloo_matar": 2,
+    "aloo_methi": 3,
+    "aloo_shimla_mirch": 4,
+    "aloo_tikki": 5,
+    "anarsa": 6,
+    "ariselu": 7,
+    "bandar_laddu": 8,
+    "basundi": 9,
+    "bhatura": 10,
+    "bhindi_masala": 11,
+    "biryani": 12,
+    "boondi": 13,
+    "butter_chicken": 14,
+    "chak_hao_kheer": 15,
+    "cham_cham": 16,
+    "chana_masala": 17,
+    "chapati": 18,
+    "chhena_kheeri": 19,
+    "chicken_razala": 20,
+    "chicken_tikka": 21,
+    "chicken_tikka_masala": 22,
+    "chikki": 23,
+    "daal_baati_churma": 24,
+    "daal_puri": 25,
+    "dal_makhani": 26,
+    "dal_tadka": 27,
+    "dharwad_pedha": 28,
+    "doodhpak": 29,
+    "double_ka_meetha": 30,
+    "dum_aloo": 31,
+    "gajar_ka_halwa": 32,
+    "gavvalu": 33,
+    "ghevar": 34,
+    "gulab_jamun": 35,
+    "imarti": 36,
+    "jalebi": 37,
+    "kachori": 38,
+    "kadai_paneer": 39,
+    "kadhi_pakoda": 40,
+    "kajjikaya": 41,
+    "kakinada_khaja": 42,
+    "kalakand": 43,
+    "karela_bharta": 44,
+    "kofta": 45,
+    "kuzhi_paniyaram": 46,
+    "lassi": 47,
+    "ledikeni": 48,
+    "litti_chokha": 49,
+    "lyangcha": 50,
+    "maach_jhol": 51,
+    "makki_di_roti_sarson_da_saag": 52,
+    "malapua": 53,
+    "misi_roti": 54,
+    "misti_doi": 55,
+    "modak": 56,
+    "mysore_pak": 57,
+    "naan": 58,
+    "navrattan_korma": 59,
+    "palak_paneer": 60,
+    "paneer_butter_masala": 61,
+    "phirni": 62,
+    "pithe": 63,
+    "poha": 64,
+    "poornalu": 65,
+    "pootharekulu": 66,
+    "qubani_ka_meetha": 67,
+    "rabri": 68,
+    "ras_malai": 69,
+    "rasgulla": 70,
+    "sandesh": 71,
+    "shankarpali": 72,
+    "sheer_korma": 73,
+    "sheera": 74,
+    "shrikhand": 75,
+    "sohan_halwa": 76,
+    "sohan_papdi": 77,
+    "sutar_feni": 78,
+    "unni_appam": 79
+  },
+  "layer_norm_eps": 1e-12,
+  "model_type": "vit",
+  "num_attention_heads": 12,
+  "num_channels": 3,
+  "num_hidden_layers": 12,
+  "patch_size": 16,
+  "problem_type": "single_label_classification",
+  "qkv_bias": true,
+  "torch_dtype": "float32",
+  "transformers_version": "4.41.0"
+}
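
This config describes a ViT image classifier with 80 Indian-dish labels. A minimal inference sketch, assuming the transformers library and access to the dima806/indian_food_image_detection repo named in _name_or_path (or a local copy of this checkpoint); the image path is a placeholder.

# Sketch: classify an image with the ViT checkpoint described by this config.
from PIL import Image
import torch
from transformers import ViTForImageClassification, ViTImageProcessor

model_id = "dima806/indian_food_image_detection"  # as recorded in config.json
processor = ViTImageProcessor.from_pretrained(model_id)
model = ViTForImageClassification.from_pretrained(model_id)

image = Image.open("some_dish.jpg").convert("RGB")  # placeholder path
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
predicted = logits.argmax(-1).item()
print(model.config.id2label[predicted])  # e.g. "biryani" for class index 12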
checkpoint-760/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:904cae83162f86f717628a6a067ddcffb5bc49262e0755087ca9d212f2efe91d
+size 343463912
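
The three lines above are a Git LFS pointer, not the weights themselves; the actual safetensors blob is ~343 MB. A small sketch, assuming the blob has already been pulled locally (for example with `git lfs pull`), of checking it against the oid recorded in the pointer.

# Sketch: verify a pulled LFS blob against the pointer's sha256 oid.
import hashlib

EXPECTED_OID = "904cae83162f86f717628a6a067ddcffb5bc49262e0755087ca9d212f2efe91d"

h = hashlib.sha256()
with open("checkpoint-760/model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
print(h.hexdigest() == EXPECTED_OID)  # True if the local file matches the pointer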
checkpoint-760/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a09258100b6ccfdc7808f3b22f5c41351f8aefd224f947ae02b99a8d6c7f5e8e
+size 687048325
checkpoint-760/preprocessor_config.json
ADDED
@@ -0,0 +1,36 @@
+{
+  "_valid_processor_keys": [
+    "images",
+    "do_resize",
+    "size",
+    "resample",
+    "do_rescale",
+    "rescale_factor",
+    "do_normalize",
+    "image_mean",
+    "image_std",
+    "return_tensors",
+    "data_format",
+    "input_data_format"
+  ],
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "image_processor_type": "ViTImageProcessor",
+  "image_std": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "resample": 2,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "height": 224,
+    "width": 224
+  }
+}
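
In practice this preprocessor config amounts to: resize to 224x224 with bilinear resampling (resample=2), rescale pixel values by 1/255, then normalize each channel with mean and std 0.5. A hedged sketch comparing the ViTImageProcessor call with the equivalent manual pipeline; the image path is a placeholder.

# Sketch: ViTImageProcessor vs. a manual pipeline under the settings above.
import numpy as np
from PIL import Image
from transformers import ViTImageProcessor

processor = ViTImageProcessor.from_pretrained("checkpoint-760")  # reads this preprocessor_config.json

image = Image.open("some_dish.jpg").convert("RGB")  # placeholder path
pixel_values = processor(images=image, return_tensors="np")["pixel_values"]

manual = np.asarray(image.resize((224, 224), Image.BILINEAR), dtype=np.float32)
manual = manual * (1.0 / 255.0)           # rescale_factor
manual = (manual - 0.5) / 0.5             # image_mean / image_std
manual = manual.transpose(2, 0, 1)[None]  # HWC -> NCHW
print(np.allclose(pixel_values, manual, atol=1e-4))  # typically True under these assumptions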
checkpoint-760/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5c0a846fa1a349bd101adba9ce04fc16f2087047036f77bf1f8a20ce411dd66e
+size 14575
checkpoint-760/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bdf70e3e5428471a99e3fa4b191a37179817877c949b68bb85ad578bd7dab3b5
+size 627
checkpoint-760/trainer_state.json
ADDED
@@ -0,0 +1,220 @@
+{
+  "best_metric": 4.174466133117676,
+  "best_model_checkpoint": "indian_food_image_detection/checkpoint-760",
+  "epoch": 20.0,
+  "eval_steps": 500,
+  "global_step": 760,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.0,
+      "eval_accuracy": 0.044375,
+      "eval_loss": 4.3319478034973145,
+      "eval_runtime": 24.3359,
+      "eval_samples_per_second": 65.746,
+      "eval_steps_per_second": 2.055,
+      "step": 38
+    },
+    {
+      "epoch": 2.0,
+      "eval_accuracy": 0.0575,
+      "eval_loss": 4.318070411682129,
+      "eval_runtime": 24.1999,
+      "eval_samples_per_second": 66.116,
+      "eval_steps_per_second": 2.066,
+      "step": 76
+    },
+    {
+      "epoch": 3.0,
+      "eval_accuracy": 0.068125,
+      "eval_loss": 4.3038434982299805,
+      "eval_runtime": 24.0128,
+      "eval_samples_per_second": 66.631,
+      "eval_steps_per_second": 2.082,
+      "step": 114
+    },
+    {
+      "epoch": 4.0,
+      "eval_accuracy": 0.080625,
+      "eval_loss": 4.289785385131836,
+      "eval_runtime": 23.7984,
+      "eval_samples_per_second": 67.231,
+      "eval_steps_per_second": 2.101,
+      "step": 152
+    },
+    {
+      "epoch": 5.0,
+      "eval_accuracy": 0.106875,
+      "eval_loss": 4.276332378387451,
+      "eval_runtime": 23.7791,
+      "eval_samples_per_second": 67.286,
+      "eval_steps_per_second": 2.103,
+      "step": 190
+    },
+    {
+      "epoch": 6.0,
+      "eval_accuracy": 0.1225,
+      "eval_loss": 4.263319492340088,
+      "eval_runtime": 23.8978,
+      "eval_samples_per_second": 66.952,
+      "eval_steps_per_second": 2.092,
+      "step": 228
+    },
+    {
+      "epoch": 7.0,
+      "eval_accuracy": 0.135,
+      "eval_loss": 4.251179218292236,
+      "eval_runtime": 24.0951,
+      "eval_samples_per_second": 66.404,
+      "eval_steps_per_second": 2.075,
+      "step": 266
+    },
+    {
+      "epoch": 8.0,
+      "eval_accuracy": 0.154375,
+      "eval_loss": 4.240114212036133,
+      "eval_runtime": 24.0038,
+      "eval_samples_per_second": 66.656,
+      "eval_steps_per_second": 2.083,
+      "step": 304
+    },
+    {
+      "epoch": 9.0,
+      "eval_accuracy": 0.16875,
+      "eval_loss": 4.2298784255981445,
+      "eval_runtime": 23.8349,
+      "eval_samples_per_second": 67.128,
+      "eval_steps_per_second": 2.098,
+      "step": 342
+    },
+    {
+      "epoch": 10.0,
+      "eval_accuracy": 0.18375,
+      "eval_loss": 4.2202229499816895,
+      "eval_runtime": 23.8506,
+      "eval_samples_per_second": 67.084,
+      "eval_steps_per_second": 2.096,
+      "step": 380
+    },
+    {
+      "epoch": 11.0,
+      "eval_accuracy": 0.195625,
+      "eval_loss": 4.211514472961426,
+      "eval_runtime": 23.8586,
+      "eval_samples_per_second": 67.062,
+      "eval_steps_per_second": 2.096,
+      "step": 418
+    },
+    {
+      "epoch": 12.0,
+      "eval_accuracy": 0.205625,
+      "eval_loss": 4.203894138336182,
+      "eval_runtime": 23.9558,
+      "eval_samples_per_second": 66.79,
+      "eval_steps_per_second": 2.087,
+      "step": 456
+    },
+    {
+      "epoch": 13.0,
+      "eval_accuracy": 0.221875,
+      "eval_loss": 4.197005748748779,
+      "eval_runtime": 23.8689,
+      "eval_samples_per_second": 67.033,
+      "eval_steps_per_second": 2.095,
+      "step": 494
+    },
+    {
+      "epoch": 13.157894736842104,
+      "grad_norm": 1.008223533630371,
+      "learning_rate": 1.0985915492957747e-06,
+      "loss": 4.2353,
+      "step": 500
+    },
+    {
+      "epoch": 14.0,
+      "eval_accuracy": 0.2325,
+      "eval_loss": 4.19118595123291,
+      "eval_runtime": 23.7272,
+      "eval_samples_per_second": 67.433,
+      "eval_steps_per_second": 2.107,
+      "step": 532
+    },
+    {
+      "epoch": 15.0,
+      "eval_accuracy": 0.2425,
+      "eval_loss": 4.1860456466674805,
+      "eval_runtime": 24.1087,
+      "eval_samples_per_second": 66.366,
+      "eval_steps_per_second": 2.074,
+      "step": 570
+    },
+    {
+      "epoch": 16.0,
+      "eval_accuracy": 0.248125,
+      "eval_loss": 4.18190860748291,
+      "eval_runtime": 23.8517,
+      "eval_samples_per_second": 67.081,
+      "eval_steps_per_second": 2.096,
+      "step": 608
+    },
+    {
+      "epoch": 17.0,
+      "eval_accuracy": 0.250625,
+      "eval_loss": 4.1787238121032715,
+      "eval_runtime": 23.9002,
+      "eval_samples_per_second": 66.945,
+      "eval_steps_per_second": 2.092,
+      "step": 646
+    },
+    {
+      "epoch": 18.0,
+      "eval_accuracy": 0.254375,
+      "eval_loss": 4.1763739585876465,
+      "eval_runtime": 23.977,
+      "eval_samples_per_second": 66.731,
+      "eval_steps_per_second": 2.085,
+      "step": 684
+    },
+    {
+      "epoch": 19.0,
+      "eval_accuracy": 0.25625,
+      "eval_loss": 4.17495059967041,
+      "eval_runtime": 24.0494,
+      "eval_samples_per_second": 66.53,
+      "eval_steps_per_second": 2.079,
+      "step": 722
+    },
+    {
+      "epoch": 20.0,
+      "eval_accuracy": 0.256875,
+      "eval_loss": 4.174466133117676,
+      "eval_runtime": 23.7897,
+      "eval_samples_per_second": 67.256,
+      "eval_steps_per_second": 2.102,
+      "step": 760
+    }
+  ],
+  "logging_steps": 500,
+  "max_steps": 760,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 20,
+  "save_steps": 500,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 3.722215845003264e+18,
+  "train_batch_size": 64,
+  "trial_name": null,
+  "trial_params": null
+}
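
trainer_state.json records one evaluation entry per epoch in log_history plus the best checkpoint; since the folder also holds optimizer.pt, scheduler.pt and rng_state.pth, training could presumably be resumed from it with Trainer's resume_from_checkpoint argument. A small sketch, assuming the file sits under checkpoint-760/, that prints the accuracy curve and the recorded best metric (which here matches the final eval loss).

# Sketch: read the evaluation history out of trainer_state.json.
import json

with open("checkpoint-760/trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "eval_accuracy" in entry:  # skip pure training-loss log entries
        print(f"epoch {entry['epoch']:>4}: "
              f"accuracy={entry['eval_accuracy']:.4f} loss={entry['eval_loss']:.4f}")

print("best_metric:", state["best_metric"])
print("best checkpoint:", state["best_model_checkpoint"])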
checkpoint-760/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76ad5da40b1b24e860ea110d7fedec0eea1b75e4acb5dfd773a606f0d1a04a17
+size 4603
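
training_args.bin is a pickled TrainingArguments object rather than a tensor archive. A hedged sketch of inspecting it, assuming a recent torch (where torch.load takes weights_only) and an installed transformers so the class can be unpickled.

# Sketch: inspect the pickled TrainingArguments stored in training_args.bin.
import torch
from transformers import TrainingArguments  # needed so the pickled class resolves

args = torch.load("checkpoint-760/training_args.bin", weights_only=False)
print(type(args).__name__)  # TrainingArguments
print(args.per_device_train_batch_size, args.num_train_epochs, args.learning_rate)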
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "dima806/indian_food_image_detection",
   "architectures": [
     "ViTForImageClassification"
   ],
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:904cae83162f86f717628a6a067ddcffb5bc49262e0755087ca9d212f2efe91d
 size 343463912
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:76ad5da40b1b24e860ea110d7fedec0eea1b75e4acb5dfd773a606f0d1a04a17
 size 4603