Commit 0bb28cf
1 Parent(s): 45edcec
small fixes
app.py CHANGED
@@ -1,3 +1,7 @@
+import os
+from functools import lru_cache, wraps
+from typing import Any, Callable
+
 import arxiv
 import gradio as gr
 import pandas as pd
@@ -5,12 +9,26 @@ from apscheduler.schedulers.background import BackgroundScheduler
 from cachetools import TTLCache, cached
 from setfit import SetFitModel
 from tqdm.auto import tqdm
-import os
 
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 
 CACHE_TIME = 60 * 60 * 12  # 12 hours
-MAX_RESULTS =
+MAX_RESULTS = 1000
+
+
+# def list_cacheable(func: Callable[..., Any]) -> Callable[..., Any]:
+#     @lru_cache(maxsize=100)
+#     def cacheable_func(*args: Any, **kwargs: Any) -> Any:
+#         return func(*args, **kwargs)
+
+#     @wraps(func)
+#     def wrapper(*args: Any, **kwargs: Any) -> Any:
+#         # Convert lists to tuples to make them hashable
+#         args = tuple(tuple(arg) if isinstance(arg, list) else arg for arg in args)
+#         kwargs = {k: tuple(v) if isinstance(v, list) else v for k, v in kwargs.items()}
+#         return cacheable_func(*args, **kwargs)
+
+#     return wrapper
 
 
 @cached(cache=TTLCache(maxsize=10, ttl=CACHE_TIME))
@@ -94,13 +112,14 @@ all_possible_arxiv_categories = sorted(prepare_data().category.unique().tolist())
 broad_categories = sorted(prepare_data().broad_category.unique().tolist())
 
 
+# @list_cacheable
 def create_markdown_summary(categories=None, new_only=True, narrow_categories=None):
     df = prepare_data()
     if new_only:
         df = df[df["prediction"] == "new_dataset"]
     if narrow_categories is not None:
         df = df[df["category"].isin(narrow_categories)]
-    if categories is not None:
+    if categories is not None and not narrow_categories:
         df = prepare_data()
         if new_only:
             df = df[df["prediction"] == "new_dataset"]
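
For reference, a minimal runnable sketch of the list_cacheable helper this commit adds (still commented out in app.py), applied to a hypothetical slow_summary function standing in for create_markdown_summary. The decorator makes list arguments hashable by converting them to tuples before they reach functools.lru_cache:

import time
from functools import lru_cache, wraps
from typing import Any, Callable


def list_cacheable(func: Callable[..., Any]) -> Callable[..., Any]:
    @lru_cache(maxsize=100)
    def cacheable_func(*args: Any, **kwargs: Any) -> Any:
        return func(*args, **kwargs)

    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        # Lists are unhashable, so lru_cache cannot key on them directly;
        # convert them to tuples before calling the cached inner function.
        args = tuple(tuple(arg) if isinstance(arg, list) else arg for arg in args)
        kwargs = {k: tuple(v) if isinstance(v, list) else v for k, v in kwargs.items()}
        return cacheable_func(*args, **kwargs)

    return wrapper


@list_cacheable
def slow_summary(categories=None):
    # Hypothetical stand-in for create_markdown_summary: pretend it is expensive.
    time.sleep(1)
    return f"summary for {categories}"


print(slow_summary(["cs.CL", "cs.LG"]))  # first call is computed (slow)
print(slow_summary(["cs.CL", "cs.LG"]))  # repeat call is served from the cache (fast)

Note that the wrapped function then receives tuples rather than lists, which the pandas .isin() filters in create_markdown_summary accept just as well.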
|