Scrya committed on
Commit
a4067ef
1 Parent(s): 614a959

update model card README.md

Browse files
README.md ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ language:
3
+ - id
4
+ license: apache-2.0
5
+ tags:
6
+ - whisper-event
7
+ - generated_from_trainer
8
+ model-index:
9
+ - name: Whisper Medium ID - FLEURS-CV
10
+ results: []
11
+ ---
12
+
13
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
14
+ should probably proofread and complete it, then remove this comment. -->
15
+
16
+ # Whisper Medium ID - FLEURS-CV
17
+
18
+ This model is a fine-tuned version of [openai/whisper-medium](https://huggingface.co/openai/whisper-medium) on the FLEURS and Common Voice 11.0 (Indonesian) datasets.
19
+ It achieves the following results on the evaluation set:
20
+ - eval_loss: 0.2563
21
+ - eval_wer: 8.4690
22
+ - eval_runtime: 2961.9108
23
+ - eval_samples_per_second: 1.453
24
+ - eval_steps_per_second: 0.091
25
+ - epoch: 14.29
26
+ - step: 5000
27
+
28
+ ## Model description
29
+
30
+ More information needed
31
+
32
+ ## Intended uses & limitations
33
+
34
+ More information needed
35
+
36
+ ## Training and evaluation data
37
+
38
+ More information needed
39
+
40
+ ## Training procedure
41
+
42
+ ### Training hyperparameters
43
+
44
+ The following hyperparameters were used during training:
45
+ - learning_rate: 1e-05
46
+ - train_batch_size: 32
47
+ - eval_batch_size: 16
48
+ - seed: 42
49
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
50
+ - lr_scheduler_type: linear
51
+ - lr_scheduler_warmup_steps: 500
52
+ - training_steps: 10000
53
+ - mixed_precision_training: Native AMP
54
+
55
+ ### Framework versions
56
+
57
+ - Transformers 4.26.0.dev0
58
+ - Pytorch 1.13.1+cu117
59
+ - Datasets 2.7.1.dev0
60
+ - Tokenizers 0.13.2
fine-tune-whisper-non-streaming-id.ipynb CHANGED
@@ -5972,7 +5972,7 @@
5972
  },
5973
  {
5974
  "cell_type": "code",
5975
- "execution_count": null,
5976
  "id": "c704f91e-241b-48c9-b8e0-f0da396a9663",
5977
  "metadata": {
5978
  "id": "c704f91e-241b-48c9-b8e0-f0da396a9663"
@@ -5980,8 +5980,8 @@
5980
  "outputs": [],
5981
  "source": [
5982
  "kwargs = {\n",
5983
- " \"dataset_tags\": [\"google/fleurs\", \"mozilla-foundation/common_voice_11_0\"],\n",
5984
- " \"dataset\": [\"FLEURS\", \"Common Voice 11.0\"], # a 'pretty' name for the training dataset\n",
5985
  " \"language\": \"id\",\n",
5986
  " \"model_name\": \"Whisper Medium ID - FLEURS-CV\", # a 'pretty' name for your model\n",
5987
  " \"finetuned_from\": \"openai/whisper-medium\",\n",
@@ -6007,7 +6007,46 @@
6007
  "metadata": {
6008
  "id": "d7030622-caf7-4039-939b-6195cdaa2585"
6009
  },
6010
- "outputs": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6011
  "source": [
6012
  "trainer.push_to_hub(**kwargs)"
6013
  ]
 
5972
  },
5973
  {
5974
  "cell_type": "code",
5975
+ "execution_count": 24,
5976
  "id": "c704f91e-241b-48c9-b8e0-f0da396a9663",
5977
  "metadata": {
5978
  "id": "c704f91e-241b-48c9-b8e0-f0da396a9663"
 
5980
  "outputs": [],
5981
  "source": [
5982
  "kwargs = {\n",
5983
+ "# \"dataset_tags\": [\"google/fleurs\", \"mozilla-foundation/common_voice_11_0\"],\n",
5984
+ "# \"dataset\": [\"FLEURS\", \"Common Voice 11.0\"], # a 'pretty' name for the training dataset\n",
5985
  " \"language\": \"id\",\n",
5986
  " \"model_name\": \"Whisper Medium ID - FLEURS-CV\", # a 'pretty' name for your model\n",
5987
  " \"finetuned_from\": \"openai/whisper-medium\",\n",
 
6007
  "metadata": {
6008
  "id": "d7030622-caf7-4039-939b-6195cdaa2585"
6009
  },
6010
+ "outputs": [
6011
+ {
6012
+ "name": "stderr",
6013
+ "output_type": "stream",
6014
+ "text": [
6015
+ "Saving model checkpoint to ./\n",
6016
+ "Configuration saved in ./config.json\n",
6017
+ "Model weights saved in ./pytorch_model.bin\n",
6018
+ "Feature extractor saved in ./preprocessor_config.json\n"
6019
+ ]
6020
+ },
6021
+ {
6022
+ "data": {
6023
+ "application/vnd.jupyter.widget-view+json": {
6024
+ "model_id": "88c804e93e1844dd8eca6aea753e7ef8",
6025
+ "version_major": 2,
6026
+ "version_minor": 0
6027
+ },
6028
+ "text/plain": [
6029
+ "Upload file pytorch_model.bin: 0%| | 32.0k/2.85G [00:00<?, ?B/s]"
6030
+ ]
6031
+ },
6032
+ "metadata": {},
6033
+ "output_type": "display_data"
6034
+ },
6035
+ {
6036
+ "data": {
6037
+ "application/vnd.jupyter.widget-view+json": {
6038
+ "model_id": "be47f0485aa74ae6aa86faa95a079369",
6039
+ "version_major": 2,
6040
+ "version_minor": 0
6041
+ },
6042
+ "text/plain": [
6043
+ "Upload file runs/Dec17_13-22-39_150-136-214-225/events.out.tfevents.1671283363.150-136-214-225.126569.0: 85%|…"
6044
+ ]
6045
+ },
6046
+ "metadata": {},
6047
+ "output_type": "display_data"
6048
+ }
6049
+ ],
6050
  "source": [
6051
  "trainer.push_to_hub(**kwargs)"
6052
  ]