KatGaw committed on
Commit
0659aaa
1 Parent(s): 4143184

adding new reddit group

Browse files
Files changed (2) hide show
  1. .DS_Store +0 -0
  2. tools/sentiment_analysis_util.py +12 -1
.DS_Store CHANGED
Binary files a/.DS_Store and b/.DS_Store differ
 
tools/sentiment_analysis_util.py CHANGED
@@ -68,6 +68,17 @@ def fetch_reddit_news(topic):
68
  if len(headlines)<10:
69
  for submission in reddit.subreddit('nova').search(topic): #,time_filter='week'):
70
  headlines.add(submission.title + ', Date: ' +datetime.utcfromtimestamp(int(submission.created_utc)).strftime('%Y-%m-%d %H:%M:%S') + ', URL:' +submission.url)
 
 
 
 
 
 
 
 
 
 
 
71
  return headlines
72
 
73
  def analyze_sentiment(article):
@@ -117,7 +128,7 @@ def generate_summary_of_sentiment(sentiment_analysis_results): #, dominant_senti
117
  )
118
 
119
  messages=[
120
- {"role": "system", "content": "You are a helpful assistant that looks at all news articles, their sentiment, along with domainant sentiment and generates a summary rationalizing dominant sentiment. At the end of the summary, add URL links with dates for all the articles in the markdown format for streamlit. Example of adding the URLs: The Check out the links: [link](%s) % url, 2024-03-01 "},
121
  {"role": "user", "content": f"News articles and their sentiments: {news_article_sentiment}"} #, and dominant sentiment is: {dominant_sentiment}"}
122
  ]
123
  response = model.invoke(messages)
 
68
  if len(headlines)<10:
69
  for submission in reddit.subreddit('nova').search(topic): #,time_filter='week'):
70
  headlines.add(submission.title + ', Date: ' +datetime.utcfromtimestamp(int(submission.created_utc)).strftime('%Y-%m-%d %H:%M:%S') + ', URL:' +submission.url)
71
+
72
+ for submission in reddit.subreddit('washingtondc').search(topic,time_filter='week'):
73
+ headlines.add(submission.title + ', Date: ' +datetime.utcfromtimestamp(int(submission.created_utc)).strftime('%Y-%m-%d %H:%M:%S') + ', URL:' +submission.url)
74
+
75
+ if len(headlines)<10:
76
+ for submission in reddit.subreddit('washingtondc').search(topic,time_filter='year'):
77
+ headlines.add(submission.title + ', Date: ' +datetime.utcfromtimestamp(int(submission.created_utc)).strftime('%Y-%m-%d %H:%M:%S') + ', URL:' +submission.url)
78
+ if len(headlines)<10:
79
+ for submission in reddit.subreddit('washingtondc').search(topic): #,time_filter='week'):
80
+ headlines.add(submission.title + ', Date: ' +datetime.utcfromtimestamp(int(submission.created_utc)).strftime('%Y-%m-%d %H:%M:%S') + ', URL:' +submission.url)
81
+
82
  return headlines
83
 
84
  def analyze_sentiment(article):
 
128
  )
129
 
130
  messages=[
131
+ {"role": "system", "content": "You are a helpful assistant that looks at all news articles with their sentiment, hyperlink and date in front of the article text, the articles MUST be ordered by date!, and generate a summary rationalizing dominant sentiment. At the end of the summary, add URL links with dates for all the articles in the markdown format for streamlit. Make sure the articles as well as the links are ordered descending by Date!!!!!!! Example of adding the URLs: The Check out the links: [link](%s) % url, 2024-03-01. "},
132
  {"role": "user", "content": f"News articles and their sentiments: {news_article_sentiment}"} #, and dominant sentiment is: {dominant_sentiment}"}
133
  ]
134
  response = model.invoke(messages)