eaedk committed on
Commit
e3a840c
0 Parent(s):

Duplicate from eaedk/agri-tech-fastapi

Browse files
Files changed (8) hide show
  1. .gitattributes +34 -0
  2. .gitignore +162 -0
  3. Dockerfile +14 -0
  4. README.md +19 -0
  5. assets/ml/.gitkeep +0 -0
  6. assets/ml/crop_recommandation2.pkl +3 -0
  7. main.py +170 -0
  8. requirements.txt +44 -0
.gitattributes ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tflite filter=lfs diff=lfs merge=lfs -text
29
+ *.tgz filter=lfs diff=lfs merge=lfs -text
30
+ *.wasm filter=lfs diff=lfs merge=lfs -text
31
+ *.xz filter=lfs diff=lfs merge=lfs -text
32
+ *.zip filter=lfs diff=lfs merge=lfs -text
33
+ *.zst filter=lfs diff=lfs merge=lfs -text
34
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/#use-with-ide
110
+ .pdm.toml
111
+
112
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113
+ __pypackages__/
114
+
115
+ # Celery stuff
116
+ celerybeat-schedule
117
+ celerybeat.pid
118
+
119
+ # SageMath parsed files
120
+ *.sage.py
121
+
122
+ # Environments
123
+ .env
124
+ .venv
125
+ env/
126
+ venv/
127
+ ENV/
128
+ env.bak/
129
+ venv.bak/
130
+
131
+ # Spyder project settings
132
+ .spyderproject
133
+ .spyproject
134
+
135
+ # Rope project settings
136
+ .ropeproject
137
+
138
+ # mkdocs documentation
139
+ /site
140
+
141
+ # mypy
142
+ .mypy_cache/
143
+ .dmypy.json
144
+ dmypy.json
145
+
146
+ # Pyre type checker
147
+ .pyre/
148
+
149
+ # pytype static type analyzer
150
+ .pytype/
151
+
152
+ # Cython debug symbols
153
+ cython_debug/
154
+
155
+ # PyCharm
156
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
159
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160
+ #.idea/
161
+ .vscode/
162
+ assets/crop_recommandation2.pkl
Dockerfile ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9
#-slim

WORKDIR /app

# Copy only the dependency manifest first so this layer is cached
# unless requirements.txt changes.
COPY requirements.txt ./

# --no-cache-dir: don't persist pip's wheel cache in the image layer.
RUN pip install --no-cache-dir -r requirements.txt

EXPOSE 7860

COPY . .

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
README.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Agri Tech Fastapi
3
+ emoji: 🪴
4
+ colorFrom: orange
5
+ colorTo: green
6
+ sdk: docker
7
+ pinned: false
8
+ license: mit
9
+ duplicated_from: eaedk/agri-tech-fastapi
10
+ ---
11
+
12
+ Here is the link to directly access the API: [here](https://eaedk-agri-tech-fastapi.hf.space).
13
+ Access the documentation [here](https://eaedk-agri-tech-fastapi.hf.space/docs).
14
+
15
+ To directly access your API hosted on HuggingFace, you should use a URL that follows this format: `https://<USERNAME>-<SPACENAME>.hf.space/`
16
+
17
+ In my case it is : https://eaedk-agri-tech-fastapi.hf.space/
18
+
19
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
assets/ml/.gitkeep ADDED
File without changes
assets/ml/crop_recommandation2.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:90c49b603c04d5dec6dc0be582b59dc5cd4334ca2e97364b78be91bf314bd19a
3
+ size 1787986
main.py ADDED
@@ -0,0 +1,170 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI
2
+ import uvicorn
3
+ from typing import List, Literal, Optional
4
+ from pydantic import BaseModel
5
+ import pandas as pd
6
+ import pickle
7
+ import os
8
+ import json
9
+ import logging
10
+
11
+ # logger
12
+ logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
13
+
14
+ # Util Functions & Classes
15
def loading(fp):
    """Unpickle and return the object stored at file path *fp*.

    Parameters
    ----------
    fp : str
        Path to a pickle file (here: the bundled ML artifacts).

    Returns
    -------
    object
        Whatever object was pickled at *fp*.
    """
    # SECURITY NOTE(review): pickle.load executes arbitrary code from the
    # file — only ever load artifacts shipped with this repository.
    with open(fp, "rb") as f:
        data = pickle.load(f)

    # Log only the path, not the object itself: dumping a full model/scaler
    # repr to stdout (as the old print did) is noisy and can be huge.
    logging.info("Loaded pickled object from %s", fp)
    return data
21
+
22
+
23
def predict(df, endpoint="simple"):
    """Scale *df*, run the classifier, and return per-row predictions.

    Parameters
    ----------
    df : pandas.DataFrame
        One row per land sample; columns must match the model's training
        features (N, P, K, temperature, humidity, ph, rainfall).
    endpoint : str
        Name of the calling endpoint, used only for logging.

    Returns
    -------
    list[dict]
        The input rows augmented with ``"predicted proba"`` and
        ``"predicted label"`` keys (``DataFrame.to_dict("records")``).

    Notes
    -----
    Relies on the module-level ``scaler``, ``model`` and ``labels``
    loaded at startup. Mutates *df* in place by appending two columns.
    """
    # Consistent logging (the old code mixed print() and logging, and
    # eagerly formatted f-strings); %-style args are formatted lazily.
    logging.info("'predict' function called through endpoint %r", endpoint)
    logging.info("\n%s", df.to_markdown())

    # Scale features with the scaler fitted at training time.
    scaled_df = scaler.transform(df)
    logging.info("Scaler output is of type %s", type(scaled_df))

    # Class-probability matrix: one row per sample, one column per class.
    prediction = model.predict_proba(scaled_df)
    logging.info("Prediction output: %s", prediction)

    # Keep only the most likely class per sample.
    highest_proba = prediction.max(axis=1)
    logging.info("Highest probabilities: %s", highest_proba)

    highest_proba_idx = prediction.argmax(axis=1)
    logging.info("Highest probability indexes: %s", highest_proba_idx)

    # Matching predictions with class names (fixed "Maching" typo).
    predicted_classes = [labels[i] for i in highest_proba_idx]
    logging.info("Predicted classes: %s", predicted_classes)

    # Attach results to the input rows (in-place mutation of df).
    df["predicted proba"] = highest_proba
    df["predicted label"] = predicted_classes
    logging.info("dataframe filled with prediction\n%s\n", df.to_markdown())

    # One plain dict per row — JSON-serializable for the API response.
    parsed = df.to_dict("records")
    return parsed
65
+
66
+
67
+ ## INPUT MODELING
68
class Land(BaseModel):
    """Schema for a single land sample posted to the prediction endpoints.

    Each field name must strictly match the column name used in the
    model's training dataframe header, e.g.::

        customer_age : int
        gender : Literal['male', 'female', 'other']
    """

    N: float
    P: float
    K: float
    temperature: float
    humidity: float
    ph: float
    rainfall: float
86
+
87
+
88
class Lands(BaseModel):
    """Schema for a batch of Land samples (body of /predict_multi)."""

    # Batch of individual samples to score.
    inputs: List[Land]

    def return_list_of_dict(self):
        """Return the batch as a list of plain dicts (DataFrame-ready records)."""
        # Instance method: the first parameter was previously named `cls`,
        # which wrongly suggested a classmethod. Behavior is unchanged.
        # (.dict() is the pydantic v1 API; the pinned version is 1.10.7.)
        return [land.dict() for land in self.inputs]
96
+
97
+
98
# API configuration
app = FastAPI(
    title="Agri-Tech API",
    description="This is a ML API for classification of crop to plant on a land regarding some features",
)

# ML configuration: unpickle the bundled artifacts once at startup.
ml_objects = loading(fp=os.path.join("assets", "ml", "crop_recommandation2.pkl"))

# Unpack the individual components of the pickled bundle.
model = ml_objects["model"]
scaler = ml_objects["scaler"].set_output(transform="pandas")
labels = ml_objects["labels"]
108
+
109
+
110
+ # Endpoints
111
+ @app.get("/")
112
+ def root():
113
+ return {"Description": " This is a ML API for classification of crop to plant on a land regarding some features.",
114
+ "Documentation": "Go to the docs: https://eaedk-agri-tech-fastapi.hf.space/docs"}
115
+
116
+
117
+ @app.get("/checkup")
118
+ def test(a: Optional[int], b: int):
119
+ return {"a": a, "b": b}
120
+
121
+
122
+ ## ML endpoint
123
@app.post("/predict")
def make_prediction(
    N: float,
    P: float,
    K: float,
    temperature: float,
    humidity: float,
    ph: float,
    rainfall: float,
):
    """Predict the best crop for one land sample passed as query parameters."""
    # Build a one-row dataframe whose columns are named exactly as the
    # model's training features expect.
    record = {
        "N": N,
        "P": P,
        "K": K,
        "temperature": temperature,
        "humidity": humidity,
        "ph": ph,
        "rainfall": rainfall,
    }
    df = pd.DataFrame([record])

    parsed = predict(df=df)  # df.to_dict('records')

    return {"output": parsed}
152
+
153
+
154
@app.post("/predict_multi")
def make_multi_prediction(multi_lands: Lands):
    """Predict the best crop for each land sample in the posted batch.

    Parameters
    ----------
    multi_lands : Lands
        Request body holding a list of Land samples under `inputs`.
    """
    # Fixed "Mutiple" typo in the log output.
    print(f"Multiple inputs passed: {multi_lands}\n")
    # One dataframe row per posted sample.
    df = pd.DataFrame(multi_lands.return_list_of_dict())

    parsed = predict(df=df, endpoint="multi inputs")  # df.to_dict('records')

    return {
        "output": parsed,
        "author": "Stella Archar",
        "api_version": ";)",
    }
167
+
168
+
169
if __name__ == "__main__":
    # Local development entry point: `python main.py` serves the app
    # with auto-reload enabled.
    uvicorn.run("main:app", reload=True)
requirements.txt ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ anyio==3.6.2
2
+ black==23.3.0
3
+ certifi==2023.5.7
4
+ click==8.1.3
5
+ dnspython==2.3.0
6
+ email-validator==2.0.0.post2
7
+ fastapi==0.95.2
8
+ h11==0.14.0
9
+ httpcore==0.17.1
10
+ httptools==0.5.0
11
+ httpx==0.24.1
12
+ idna==3.4
13
+ itsdangerous==2.1.2
14
+ Jinja2==3.1.2
15
+ joblib==1.2.0
16
+ MarkupSafe==2.1.2
17
+ mypy-extensions==1.0.0
18
+ numpy==1.24.3
19
+ orjson==3.8.12
20
+ packaging==23.1
21
+ pandas==2.0.1
22
+ pathspec==0.11.1
23
+ platformdirs==3.5.1
24
+ pydantic==1.10.7
25
+ python-dateutil==2.8.2
26
+ python-dotenv==1.0.0
27
+ python-multipart==0.0.6
28
+ pytz==2023.3
29
+ PyYAML==6.0
30
+ scikit-learn==1.2.2
31
+ scipy==1.10.1
32
+ six==1.16.0
33
+ sniffio==1.3.0
34
+ starlette==0.27.0
35
+ tabulate==0.9.0
36
+ threadpoolctl==3.1.0
37
+ tomli==2.0.1
38
+ typing_extensions==4.5.0
39
+ tzdata==2023.3
40
+ ujson==5.7.0
41
+ uvicorn==0.22.0
42
+ uvloop==0.17.0
43
+ watchfiles==0.19.0
44
+ websockets==11.0.3