alex-abb committed on
Commit
2e937f5
·
verified ·
1 Parent(s): 0658229

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -27
app.py CHANGED
@@ -1,13 +1,15 @@
1
  import requests
2
- import gradio as gr
3
- import bs4
4
  from bs4 import BeautifulSoup
 
 
5
 
6
 
7
- # Configuration de l'API (à ajuster selon votre setup dans le Space)
8
- API_TOKEN = "votre_token_api"
9
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
10
- headers = {"Authorization": f"Bearer {API_TOKEN}"}
 
 
 
11
 
12
  def query(payload):
13
  response = requests.post(API_URL, headers=headers, json=payload)
@@ -15,14 +17,14 @@ def query(payload):
15
 
16
  def analyze_sentiment(text):
17
  output = query({
18
- "inputs": f'''<|begin_of_text|>
19
- <|start_header_id|>system<|end_header_id|>
20
- you are going to analyse the prompt that i'll give to you and tell me if they are either talking about "chat bot", "AI dev",
21
- <|eot_id|>
22
- <|start_header_id|>user<|end_header_id|>
23
  {text}
24
- <|eot_id|>
25
- <|start_header_id|>assistant<|end_header_id|>
26
  '''
27
  })
28
 
@@ -35,38 +37,48 @@ you are going to analyse the prompt that i'll give to you and tell me if they ar
35
  return "AI dev"
36
  else:
37
  return "autre"
 
38
 
39
  def scrape_and_analyze(url):
40
  try:
41
  response = requests.get(url)
 
 
 
42
  soup = BeautifulSoup(response.text, 'html.parser')
43
 
44
  # Ajustez ce sélecteur selon la structure du site cible
45
- posts = soup.find_all('div', class_='post')
46
 
47
  categories = {"chat bot": 0, "AI dev": 0, "autre": 0}
 
 
48
 
49
  for post in posts:
50
- content = post.find('div', class_='content').text.strip() if post.find('div', class_='content') else "Pas de contenu"
 
 
 
51
  category = analyze_sentiment(content)
52
  categories[category] += 1
 
 
 
 
 
 
 
 
53
 
54
- total_posts = sum(categories.values())
55
- result = f"Total des posts analysés : {total_posts}\n"
56
- result += f"chat bot : {categories['chat bot']}\n"
57
- result += f"AI dev : {categories['AI dev']}\n"
58
- result += f"autre : {categories['autre']}"
59
 
60
  return result
61
  except Exception as e:
62
  return f"Une erreur s'est produite : {str(e)}"
63
 
64
- iface = gr.Interface(
65
- fn=scrape_and_analyze,
66
- inputs="text",
67
- outputs="text",
68
- title="Analyse de posts de blog",
69
- description="Entrez l'URL d'un blog pour analyser ses posts."
70
- )
71
 
72
- iface.launch()
 
1
  import requests
 
 
2
  from bs4 import BeautifulSoup
3
+ import gradio as gr
4
+ import os
5
 
6
 
7
+ api_token = os.environ.get("TOKEN")
 
8
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
9
+ headers = {"Authorization": f"Bearer {api_token}"}
10
+
11
+ url = "https://huggingface.co/posts"
12
+
13
 
14
  def query(payload):
15
  response = requests.post(API_URL, headers=headers, json=payload)
 
17
 
18
  def analyze_sentiment(text):
19
  output = query({
20
+ "inputs": f'''
21
+ system
22
+ you are going to analyse the prompt that I'll give to you and tell me if they are either talking about "chat bot", "AI dev", or something else.
23
+
24
+ user
25
  {text}
26
+
27
+ assistant
28
  '''
29
  })
30
 
 
37
  return "AI dev"
38
  else:
39
  return "autre"
40
+ return "autre"
41
 
42
  def scrape_and_analyze(url):
43
  try:
44
  response = requests.get(url)
45
+ if response.status_code != 200:
46
+ return f"Erreur lors de la requête : {response.status_code}"
47
+
48
  soup = BeautifulSoup(response.text, 'html.parser')
49
 
50
  # Ajustez ce sélecteur selon la structure du site cible
51
+ posts = soup.find_all('div', class_='cursor-pointer')
52
 
53
  categories = {"chat bot": 0, "AI dev": 0, "autre": 0}
54
+ total_posts = 0
55
+ result = ""
56
 
57
  for post in posts:
58
+ total_posts += 1
59
+ content = post.find('div', class_='relative').text.strip() if post.find('div', class_='relative') else "Pas de contenu"
60
+
61
+ # Analyse du texte
62
  category = analyze_sentiment(content)
63
  categories[category] += 1
64
+
65
+ # Affichage en temps réel
66
+ print(f"Post {total_posts} analysé. Catégorie : {category}")
67
+ print(f"Compteurs actuels : {categories}")
68
+ print("---")
69
+
70
+ # Ajout des résultats à la chaîne finale
71
+ result += f"Post {total_posts} : Catégorie {category}\n"
72
 
73
+ # Résultat final
74
+ result += f"\nTotal des posts analysés : {total_posts}\n"
75
+ for cat, count in categories.items():
76
+ result += f"{cat} : {count}\n"
 
77
 
78
  return result
79
  except Exception as e:
80
  return f"Une erreur s'est produite : {str(e)}"
81
 
82
+ result = scrape_and_analyze(url)
83
+ print(result)
 
 
 
 
 
84