NTaylor committed
Commit: bebe690 · Parent(s): 43b5430

edited how data was loaded, as it was (for unknown reasons) causing out-of-scope errors...
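In the old code, plot_lda_pca reassigned the module-level X and y inside the function body (X = X[:n_samples, :n_features]), which makes X and y local names for the whole function and raises UnboundLocalError before the assignment runs; that is presumably the "out of scope" error referred to above. The new version keeps the full arrays under separate module-level names and draws a random sub-sample inside the function. A minimal standalone sketch of that pattern, reconstructed from the diff below (the plotting body is omitted, and this sketch returns the two projections rather than the figure the app builds):

import numpy as np
from sklearn import datasets
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

# load the full dataset once at module level
iris = datasets.load_iris()
all_X = iris.data
all_y = iris.target

def plot_lda_pca(n_samples=50, n_features=4):
    # sub-sample inside the function instead of rebinding the
    # module-level arrays (randint draws indices with replacement,
    # matching the behaviour in the diff)
    idx = np.random.randint(0, len(all_X), n_samples)
    X = all_X[idx, :n_features]
    y = all_y[idx]

    # project onto two components with PCA and LDA
    X_r = PCA(n_components=2).fit(X).transform(X)
    X_r2 = LinearDiscriminantAnalysis(n_components=2).fit(X, y).transform(X)
    return X_r, X_r2

Note that np.random.randint samples indices with replacement, so a sub-sample can contain repeated rows; np.random.choice(len(all_X), n_samples, replace=False) would give a duplicate-free sample, but the sketch keeps the behaviour the commit actually introduces.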

Files changed (1)
  1. app.py +21 -42
app.py CHANGED
@@ -23,56 +23,35 @@ import gradio as gr
 from sklearn import datasets
 from sklearn.decomposition import PCA
 from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
+import numpy as np
 
 # load data
 iris = datasets.load_iris()
 
-X = iris.data
-y = iris.target
+all_X = iris.data
+all_y = iris.target
 target_names = iris.target_names
 
-def plot_lda_pca(n_samples = 100,
-                 n_components=2,
-                 n_features=4):
-
-    '''
-    Function to plot LDA and PCA clustering.
-
-    Parameters
-    ----------
-    n_components : int, default=2
-        Number of components to keep.
+# save models using skop
 
-    n_features : int, default=5
-        Number of features to generate.
-
-    Returns
-    -------
-    fig : matplotlib.pyplot.figure
-        Figure object.
-    '''
-
+
+def plot_lda_pca(n_samples = 50, n_features = 4):
 
+    # print(f"all X is: {all_X}")
 
-    # take sample of data
-    X = X[:n_samples, :n_features]
-    y = y[:n_samples]
+    idx = np.random.randint(0, len(iris.data), n_samples)
+    # sub-sample
+    X = all_X[idx, :n_features]
+    y = all_y[idx]
 
     # fit PCA
-    pca = PCA(n_components=n_components)
+    pca = PCA(n_components=2)
     X_r = pca.fit(X).transform(X)
-    print(f"shape of X_r: {X_r.shape}")
+
     # fit LDA
-    lda = LinearDiscriminantAnalysis(n_components=n_components)
+    lda = LinearDiscriminantAnalysis(n_components=2)
     X_r2 = lda.fit(X, y).transform(X)
-    print(f"shape of X_r2: {X_r2.shape}")
-    # take first two components
-    X_r = X_r[:, :2]
-    X_r2 = X_r2[:, :2]
-
-    print(f"shape of X_r after: {X_r.shape}")
-    print(f"shape of X_r2 after: {X_r2.shape}")
-
+
     # Percentage of variance explained for each components
     print(
         "explained variance ratio (first two components): %s"
@@ -119,15 +98,15 @@ with gr.Blocks(title=title) as demo:
    gr.Markdown(" Different number of features and number of components affect how well the low rank space is recovered. <br>"
                " Larger Depth trying to overfit and learn even the finner details of the data.<br>"
                )
-    # set max samples
+
    max_samples = len(iris.data)
-    with gr.Row():
-        n_samples = gr.Slider(value=100, minimum=2, maximum=max_samples, step=1, label="n_samples")
-        n_features = gr.Slider(value=4, minimum=2, maximum=4, step=1, label="n_features")
-
+    with gr.Row():
+        n_samples = gr.Slider(value=100, minimum=10, maximum=max_samples, step=10, label="n_samples")
 
+        n_features = gr.Slider(value=2, minimum=2, maximum=4, step=1, label="n_features")
+
    btn = gr.Button(value="Run")
-    btn.click(plot_lda_pca,inputs= [n_samples, n_features], outputs= gr.Plot(label='PCA vs LDA clustering') ) #
+    btn.click(plot_lda_pca, inputs = [n_samples, n_features], outputs= gr.Plot(label='PCA vs LDA clustering') ) #
 
 
 demo.launch()
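As a quick check of the wiring in the second hunk, the function can be called directly with values in the sliders' new ranges. This is only a sketch of a local smoke test, not part of the commit, and it assumes plot_lda_pca returns a Matplotlib figure, as the gr.Plot output and the removed docstring suggest:

# hypothetical local smoke test mirroring what btn.click passes
# from the n_samples and n_features sliders
fig = plot_lda_pca(n_samples=100, n_features=2)
fig.savefig("pca_vs_lda.png")  # assumes a matplotlib Figure is returned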