langdonholmes committed on
Commit d502261
Parent: 2c62877

Update spaCy pipeline

.ipynb_checkpoints/train_steps_log-checkpoint.txt ADDED
@@ -0,0 +1,48 @@
+ E    #       LOSS TRANS...   LOSS NER  ENTS_F  ENTS_P  ENTS_R   SCORE
+ ---  ------  -------------  ---------  ------  ------  ------  ------
+   0       0        1193.22    3047.90    0.05    0.02   44.30    0.40
+   0     200       89606.84  194914.17    3.20    1.66   41.67    0.38
+   0     400       56010.79   43259.73    2.17    1.13   27.19    0.25
+   0     600         237.83   43446.87    4.71    2.45   60.09    0.54
+   1     800       37115.10   42706.92    4.77    2.48   60.09    0.54
+   1    1000      130796.25   44942.18    4.71    2.45   59.65    0.54
+   1    1200       99605.99   47664.35    5.61    2.92   71.49    0.65
+   2    1400      146509.33   41937.85    5.47    2.85   69.74    0.63
+   2    1600       94198.77   40928.37    5.94    3.09   75.44    0.68
+   2    1800         134.86   37308.57    4.85    2.52   61.40    0.56
+   2    2000       59529.06   40560.74    5.60    2.92   71.05    0.64
+   3    2200        1911.47   40079.10    5.93    3.09   75.44    0.68
+   3    2400       16379.08   39410.89    6.00    3.12   76.32    0.69
+   3    2600         181.07   30997.83    5.55    2.89   70.61    0.64
+   4    2800       14297.08   26663.79    5.83    3.04   74.12    0.67
+   4    3000       53522.17   24156.92    5.72    2.97   72.81    0.66
+   4    3200       21847.06   19654.48    5.97    3.11   75.88    0.69
+   4    3400      125771.16   16052.71   78.44   82.21   75.00    0.76
+   5    3600      277230.48   11326.47   82.25   84.33   80.26    0.81
+   5    3800       31085.45    6978.98   81.82   90.00   75.00    0.77
+   5    4000          87.99    4278.98   84.72   89.71   80.26    0.81
+   6    4200         150.64    2555.51   82.16   88.38   76.75    0.78
+   6    4400      272414.49    1653.23   77.75   73.91   82.02    0.81
+   6    4600         299.36     730.39   77.42   81.55   73.68    0.74
+   6    4800       76129.61     489.18   75.48   73.44   77.63    0.77
+   7    5000      125191.91     289.88   79.61   89.13   71.93    0.74
+   7    5200     1227092.54     931.80   81.11   85.44   77.19    0.78
+   7    5400      111861.60     211.50   80.28   86.36   75.00    0.76
+   8    5600       15200.30      90.74   84.06   88.78   79.82    0.81
+   8    5800        8954.63     100.49   83.56   87.14   80.26    0.81
+   8    6000          67.66      22.01   84.82   86.36   83.33    0.84
+   8    6200     1937881.38     368.50   85.51   91.50   80.26    0.81
+   9    6400     2484790.71     907.41   83.63   84.38   82.89    0.83
+   9    6600      163576.46     138.34   82.64   82.82   82.46    0.82
+   9    6800      154802.09     158.06   82.61   81.90   83.33    0.83
+  10    7000      562550.26     311.55   83.66   83.12   84.21    0.84
+  10    7200      465103.58     327.55   79.72   85.07   75.00    0.76
+  10    7400         248.32      50.10   79.82   79.82   79.82    0.80
+  10    7600      229226.21     255.05   82.54   85.45   79.82    0.80
+  11    7800      205179.70     156.31   82.92   86.26   79.82    0.80
+  11    8000      602927.17     250.93   84.40   88.46   80.70    0.81
+  11    8200     1972288.53     592.70   85.25   89.81   81.14    0.82
+  12    8400         248.31      44.60   85.52   88.32   82.89    0.83
+  12    8600      916323.58     316.59   84.63   85.97   83.33    0.84
+  12    8800          56.26      16.94   80.75   77.20   84.65    0.84
+  12    9000         503.44      54.06   83.94   87.98   80.26    0.81
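The SCORE column is what spaCy uses to rank checkpoints during training. A minimal sketch (not part of the repo) for pulling the best-scoring step out of a log in this format, with the file path assumed to match this commit:

```python
# Find the training step with the highest SCORE in a spaCy training log
# laid out like the table above.
best = None
with open("train_steps_log.txt") as log:  # path assumed from this commit
    for line in log:
        parts = line.split()
        # Data rows have 8 fields and start with the epoch number (E);
        # the header and separator rows do not.
        if len(parts) == 8 and parts[0].isdigit():
            epoch, step, score = int(parts[0]), int(parts[1]), float(parts[-1])
            if best is None or score > best[2]:
                best = (epoch, step, score)
print(best)  # (8, 6000, 0.84) for the log above
```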
README.md CHANGED
@@ -13,11 +13,11 @@ model-index:
   metrics:
   - name: NER Precision
     type: precision
-  value: 0.6353383459
+  value: 0.8311688312
   - name: NER Recall
     type: recall
-  value: 0.8164251208
+  value: 0.8421052632
   - name: NER F Score
     type: f_score
-  value: 0.7145877378
+  value: 0.8366013072
   ---
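As a quick consistency check, the new F score is the harmonic mean of the new precision and recall values above:

```python
# Check that the updated f_score is the harmonic mean of precision and recall.
p, r = 0.8311688312, 0.8421052632
f = 2 * p * r / (p + r)
print(round(f, 7))  # 0.8366013, matching the updated f_score
```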
en_student_name_detector-any-py3-none-any.whl CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:a567a8c3089483ce5e0b6badf85e3dd123c43273e7af1bf9060d26b7f4a8d13e
-  size 521547636
+  oid sha256:f7471fd88a60cd66107c1a0a13589ca36f88458c385edae26957454d19eb8c42
+  size 521550516
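The wheel is stored with Git LFS, so the diff above touches only the pointer file; per the LFS spec, the oid is the SHA-256 of the wheel itself. A minimal sketch for verifying a downloaded copy against the new pointer (local file path assumed):

```python
# Verify a downloaded wheel against the SHA-256 recorded in the LFS pointer.
import hashlib

EXPECTED = "f7471fd88a60cd66107c1a0a13589ca36f88458c385edae26957454d19eb8c42"

sha = hashlib.sha256()
with open("en_student_name_detector-any-py3-none-any.whl", "rb") as wheel:
    for chunk in iter(lambda: wheel.read(1 << 20), b""):  # 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED, "wheel does not match the LFS pointer"
```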
meta.json CHANGED
@@ -7,8 +7,8 @@
   "email":"",
   "url":"",
   "license":"",
-  "spacy_version":">=3.3.1,<3.4.0",
-  "spacy_git_version":"5fb597f77",
+  "spacy_version":">=3.4.1,<3.5.0",
+  "spacy_git_version":"5c2a00cef",
   "vectors":{
     "width":0,
     "vectors":0,
@@ -35,19 +35,19 @@

   ],
   "performance":{
-  "ents_f":0.7145877378,
-  "ents_p":0.6353383459,
-  "ents_r":0.8164251208,
+  "ents_f":0.8366013072,
+  "ents_p":0.8311688312,
+  "ents_r":0.8421052632,
   "ents_per_type":{
     "STUDENT":{
-    "p":0.6353383459,
-    "r":0.8164251208,
-    "f":0.7145877378
+    "p":0.8311688312,
+    "r":0.8421052632,
+    "f":0.8366013072
     }
   },
-  "speed":21016.7125076686,
-  "transformer_loss":264.9285582366,
-  "ner_loss":160.91844294
+  "speed":22300.9340042652,
+  "transformer_loss":562550.263513213,
+  "ner_loss":311.5489451973
   },
   "requirements":[
     "spacy-transformers>=1.1.7,<1.2.0"
train_steps_log.txt ADDED
@@ -0,0 +1,48 @@
+ E    #       LOSS TRANS...   LOSS NER  ENTS_F  ENTS_P  ENTS_R   SCORE
+ ---  ------  -------------  ---------  ------  ------  ------  ------
+   0       0        1193.22    3047.90    0.05    0.02   44.30    0.40
+   0     200       89606.84  194914.17    3.20    1.66   41.67    0.38
+   0     400       56010.79   43259.73    2.17    1.13   27.19    0.25
+   0     600         237.83   43446.87    4.71    2.45   60.09    0.54
+   1     800       37115.10   42706.92    4.77    2.48   60.09    0.54
+   1    1000      130796.25   44942.18    4.71    2.45   59.65    0.54
+   1    1200       99605.99   47664.35    5.61    2.92   71.49    0.65
+   2    1400      146509.33   41937.85    5.47    2.85   69.74    0.63
+   2    1600       94198.77   40928.37    5.94    3.09   75.44    0.68
+   2    1800         134.86   37308.57    4.85    2.52   61.40    0.56
+   2    2000       59529.06   40560.74    5.60    2.92   71.05    0.64
+   3    2200        1911.47   40079.10    5.93    3.09   75.44    0.68
+   3    2400       16379.08   39410.89    6.00    3.12   76.32    0.69
+   3    2600         181.07   30997.83    5.55    2.89   70.61    0.64
+   4    2800       14297.08   26663.79    5.83    3.04   74.12    0.67
+   4    3000       53522.17   24156.92    5.72    2.97   72.81    0.66
+   4    3200       21847.06   19654.48    5.97    3.11   75.88    0.69
+   4    3400      125771.16   16052.71   78.44   82.21   75.00    0.76
+   5    3600      277230.48   11326.47   82.25   84.33   80.26    0.81
+   5    3800       31085.45    6978.98   81.82   90.00   75.00    0.77
+   5    4000          87.99    4278.98   84.72   89.71   80.26    0.81
+   6    4200         150.64    2555.51   82.16   88.38   76.75    0.78
+   6    4400      272414.49    1653.23   77.75   73.91   82.02    0.81
+   6    4600         299.36     730.39   77.42   81.55   73.68    0.74
+   6    4800       76129.61     489.18   75.48   73.44   77.63    0.77
+   7    5000      125191.91     289.88   79.61   89.13   71.93    0.74
+   7    5200     1227092.54     931.80   81.11   85.44   77.19    0.78
+   7    5400      111861.60     211.50   80.28   86.36   75.00    0.76
+   8    5600       15200.30      90.74   84.06   88.78   79.82    0.81
+   8    5800        8954.63     100.49   83.56   87.14   80.26    0.81
+   8    6000          67.66      22.01   84.82   86.36   83.33    0.84
+   8    6200     1937881.38     368.50   85.51   91.50   80.26    0.81
+   9    6400     2484790.71     907.41   83.63   84.38   82.89    0.83
+   9    6600      163576.46     138.34   82.64   82.82   82.46    0.82
+   9    6800      154802.09     158.06   82.61   81.90   83.33    0.83
+  10    7000      562550.26     311.55   83.66   83.12   84.21    0.84
+  10    7200      465103.58     327.55   79.72   85.07   75.00    0.76
+  10    7400         248.32      50.10   79.82   79.82   79.82    0.80
+  10    7600      229226.21     255.05   82.54   85.45   79.82    0.80
+  11    7800      205179.70     156.31   82.92   86.26   79.82    0.80
+  11    8000      602927.17     250.93   84.40   88.46   80.70    0.81
+  11    8200     1972288.53     592.70   85.25   89.81   81.14    0.82
+  12    8400         248.31      44.60   85.52   88.32   82.89    0.83
+  12    8600      916323.58     316.59   84.63   85.97   83.33    0.84
+  12    8800          56.26      16.94   80.75   77.20   84.65    0.84
+  12    9000         503.44      54.06   83.94   87.98   80.26    0.81