import streamlit as st
from openai import OpenAI
import os
import requests
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Initialize the client with HuggingFace
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",
    api_key=os.environ.get('HFSecret')  # Replace with your HuggingFace token
)

# Define the Llama 3 8B model
repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"

# App heading
st.title("Text Analysis with Llama 3: Sentiment, Summarization, and Keyword Extraction")

# Raw-file URL for each preset text, assembled from a common GitHub base path.
_BASE = "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/"
url_dict = {
    'Appreciation Letter': _BASE + "Appreciation_Letter.txt",
    'Regret Letter': _BASE + "Regret_Letter.txt",
    'Kindness Tale': _BASE + "Kindness_Tale.txt",
    'Lost Melody Tale': _BASE + "Lost_Melody_Tale.txt",
    'Twitter Example 1': _BASE + "Twitter_Example_1.txt",
    'Twitter Example 2': _BASE + "Twitter_Example_2.txt",
}

# Dropdown choices: 'None' (no preset) first, then every preset in order.
options = ['None'] + list(url_dict)

# Let the user pick one of the preset texts.
selected_option = st.selectbox("Select a preset option", options)

# Function to fetch text content
def fetch_text_content(option, url_map=None):
    """Download the preset text associated with *option*.

    Args:
        option: Name of the preset selected in the UI (e.g. 'Regret Letter').
        url_map: Optional mapping of option name -> URL. Defaults to the
            module-level ``url_dict`` so existing callers are unaffected.

    Returns:
        The fetched text on success, ``"Error fetching the text"`` when the
        request fails (non-200 status, timeout, or connection error), or
        ``""`` when *option* has no associated URL (e.g. 'None').
    """
    urls = url_dict if url_map is None else url_map
    if option not in urls:
        return ""
    try:
        # Bounded timeout: without one, requests.get can hang forever and
        # freeze the Streamlit script run on a slow/unreachable host.
        response = requests.get(urls[option], timeout=10)
    except requests.RequestException:
        # Network failures previously propagated and crashed the app run.
        return "Error fetching the text"
    return response.text if response.status_code == 200 else "Error fetching the text"

# Pre-fill the editable text area with the selected preset's content
# (empty string when 'None' is selected).
preset_text = fetch_text_content(selected_option)
text = st.text_area('Enter the text to analyze', preset_text)

# Function to call Llama 3 for analysis
def call_llama_analysis(task, text):
    """Run one analysis *task* over *text* with the Llama 3 model.

    Args:
        task: Human-readable task name inserted into the prompt,
            e.g. "sentiment analysis" or "summarization".
        text: The text to analyze.

    Returns:
        The model's completion as a string.

    Raises:
        Exception: Any error raised by the OpenAI client (callers catch
            broad ``Exception`` and surface it via ``st.error``).
    """
    prompt = f"Perform {task} on the following text:\n\n{text}"

    # Call Llama 3 for the task
    response = client.completions.create(
        model=repo_id,
        prompt=prompt,
        max_tokens=3000,
        temperature=0.5
    )

    # BUG FIX: the openai>=1.0 client (used here via `from openai import
    # OpenAI`) returns a typed Completion object, not a dict — subscripting
    # it (`response['choices']`) raises TypeError. Use attribute access.
    return response.choices[0].text

# Run all three analyses when the user clicks the button. Each entry is
# (spinner label, task name sent to the model, expander title, error prefix);
# the loop replays the same spinner/expander/error flow per task.
if st.button("Start Analysis"):
    _TASKS = [
        ("Analyzing Sentiment...", "sentiment analysis",
         "Sentiment Analysis - ✅ Completed", "Error in Sentiment Analysis"),
        ("Summarizing...", "summarization",
         "Summarization - ✅ Completed", "Error in Summarization"),
        ("Extracting Keywords...", "keyword extraction",
         "Keywords Extraction - ✅ Completed", "Error in Keyword Extraction"),
    ]
    for spinner_label, task_name, expander_title, error_prefix in _TASKS:
        with st.spinner(spinner_label):
            try:
                result = call_llama_analysis(task_name, text)
                with st.expander(expander_title, expanded=True):
                    st.write(result)
            except Exception as e:
                st.error(f"{error_prefix}: {str(e)}")