sentivity committed on
Commit
70690ce
·
verified ·
1 Parent(s): fc8aa5c

Update app.py

Browse files

This commit adds the following changes: more robust print statements, checks on all tickers, an SPY comparison, handling for all exceptions, a shorter cache lifetime, a new Polygon API key, and a timestamp recording when each article was collected and scored.

Files changed (1) hide show
  1. app.py +68 -76
app.py CHANGED
@@ -91,27 +91,31 @@ def predict_sentiment(text):
91
  def fetch_articles(ticker):
92
  POLYGON_API_KEY = "cMCv7jipVvV4qLBikgzllNmW_isiODRR"
93
  url = f"https://api.polygon.io/v2/reference/news?ticker={ticker}&limit=1&apiKey={POLYGON_API_KEY}"
 
94
  try:
95
- response = requests.get(url)
 
96
  data = response.json()
97
- if "results" in data and len(data["results"]) > 0:
98
  article = data["results"][0]
99
  title = article.get("title", "")
100
  description = article.get("description", "")
101
- return [title+description]
102
- else:
103
- return [f"No news articles found for {ticker}."]
104
- except Exception as e:
105
- return [f"Error fetching articles for {ticker}: {str(e)}"]
106
-
107
- # allowed tickers
108
- ALLOWED_TICKERS = {"AAPL", "GOOG", "AMZN", "NVDA", "META",'TSLA','QQQ','SPY'}
 
 
109
 
110
  # initialize cache
111
- sentiment_cache = {ticker: {"article": None, "sentiment": None, "timestamp": None} for ticker in ALLOWED_TICKERS}
112
 
113
  # checks if cache is valid
114
- def is_cache_valid(cached_time, max_age_minutes=30):
115
  if cached_time is None:
116
  return False
117
  now = datetime.datetime.utcnow()
@@ -119,73 +123,61 @@ def is_cache_valid(cached_time, max_age_minutes=30):
119
  return age.total_seconds() < max_age_minutes * 60
120
 
121
  # analyzes the tikcers
122
- def analyze_ticker(ticker):
123
- ticker = ticker.upper()
124
- if ticker not in ALLOWED_TICKERS:
125
- return [{
126
- "article": f"Sorry, '{ticker}' is not supported. Please choose one of: {', '.join(sorted(ALLOWED_TICKERS))}.",
127
- "sentiment": 0.0
128
- }]
129
-
130
- cache_entry = sentiment_cache[ticker]
131
-
132
- # if cache is valid and article exists
133
- if is_cache_valid(cache_entry["timestamp"]) and cache_entry["article"] is not None:
134
-
135
- return [{
136
- "article": cache_entry["article"],
137
- "sentiment": cache_entry["sentiment"]
138
- }]
139
-
140
- # fetch new article and update cache if cache is invalid
141
- articles = fetch_articles(ticker)
142
- if not articles:
143
- return [{"article": "No articles found.", "sentiment": 0.0}]
144
-
145
- article = articles[0]
146
-
147
- clean_text = preprocess_text(article)
148
- sentiment = predict_sentiment(clean_text)
149
-
150
- # update cache with current time
151
- sentiment_cache[ticker] = {
152
- "article": article,
153
- "sentiment": sentiment,
154
- "timestamp": datetime.datetime.utcnow()
155
- }
156
-
157
- return [{
158
- "article": article,
159
- "sentiment": sentiment
160
- }]
161
-
162
- def display_sentiment(ticker):
163
- results = analyze_ticker(ticker)
164
-
165
- html_output = "<h2>Sentiment Analysis</h2><ul>"
166
- if results:
167
- for r in results:
168
- html_output += f"<li><b>{r['article']}</b><br>Score: {r['sentiment']:.2f}</li>"
169
- else:
170
- html_output += "<li>No sentiment data available for this ticker.</li>"
171
- html_output += "</ul>"
172
- return html_output
173
 
174
  with gr.Blocks() as demo:
175
- gr.Markdown("# Ticker Sentiment Analysis")
176
- ticker_input = gr.Textbox(label="Enter Ticker Symbol (e.g., AAPL)")
177
  output_html = gr.HTML()
178
- analyze_btn = gr.Button("Analyze")
179
-
180
- analyze_btn.click(
181
- fn=lambda t: "<h2>Fetching and scoring sentiment for... " + t + "</h2><p>Please wait...</p>",
182
- inputs=[ticker_input],
183
- outputs=[output_html],
184
- queue=False
185
- ).then(
186
- fn=display_sentiment,
187
- inputs=[ticker_input],
188
- outputs=[output_html]
189
  )
190
 
191
  demo.launch()
 
91
def fetch_articles(ticker):
    """Fetch the latest news item for *ticker* from Polygon.io.

    Returns a single string: "title description" of the newest article on
    success, or a human-readable "no results" / error message on failure.
    Never raises — all exceptions are converted to message strings so the
    caller can still score *something*.
    """
    import os  # function-scope import keeps this block self-contained

    # SECURITY NOTE(review): an API key is hard-coded in a public commit and
    # should be rotated. Prefer the POLYGON_API_KEY environment variable;
    # the literal is kept only as a backward-compatible fallback.
    api_key = os.environ.get("POLYGON_API_KEY", "cMCv7jipVvV4qLBikgzllNmW_isiODRR")
    url = f"https://api.polygon.io/v2/reference/news?ticker={ticker}&limit=1&apiKey={api_key}"
    print(f"[FETCH] {ticker}: {url}")
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # surface 4xx/5xx as HTTPError
        data = response.json()
        if data.get("results"):
            article = data["results"][0]
            title = article.get("title", "")
            description = article.get("description", "")
            return title + " " + description
        return f"No news articles found for {ticker}."
    # checks specific HTTP errors (rate limit, bad key, etc.)
    except requests.exceptions.HTTPError as http_err:
        print(f"[ERROR] HTTP error for {ticker}: {http_err}")
        return f"HTTP error when fetching {ticker}: {http_err}"
    # catches any other error (connection failures, invalid JSON, ...)
    except Exception as exc:
        print(f"[ERROR] Unexpected error for {ticker}: {exc}")
        return f"Error fetching articles for {ticker}: {exc}"
113
 
114
# initialize cache: maps ticker symbol -> {"article": str, "sentiment": float,
# "timestamp": datetime} for the most recent fetch; starts empty and is
# populated lazily by analyze_ticker().
sentiment_cache: dict = {}
116
 
117
# checks if cache is valid
def is_cache_valid(cached_time, max_age_minutes=10):
    """Return True if *cached_time* is younger than *max_age_minutes*.

    *cached_time* is a naive UTC datetime (as produced by
    datetime.datetime.utcnow() elsewhere in this file) or None; None is
    always treated as stale.
    """
    if cached_time is None:
        return False
    now = datetime.datetime.utcnow()
    # Age of the cache entry; this assignment was missing, leaving `age`
    # undefined and raising NameError on every cache check.
    age = now - cached_time
    return age.total_seconds() < max_age_minutes * 60
124
 
125
# analyzes the tickers: the user's choice plus SPY as a market baseline
def analyze_ticker(user_ticker: str):
    """Score sentiment for *user_ticker* and SPY, using the module cache.

    Returns a list of dicts with keys "ticker", "article", "sentiment" and
    "timestamp" (naive UTC datetime of when the article was scored); the
    user's ticker is first, SPY second. Fresh fetches update
    ``sentiment_cache``; valid cached entries are reused.
    """
    user_ticker = user_ticker.upper().strip()
    # Deterministic order (a set literal iterates in arbitrary order, which
    # made fetch/print/cache order vary run to run); also avoids fetching
    # SPY twice when the user asks for SPY itself.
    tickers_to_check = [user_ticker] if user_ticker == "SPY" else [user_ticker, "SPY"]
    results = []

    for tk in tickers_to_check:
        cached = sentiment_cache.get(tk, {})
        if cached and is_cache_valid(cached.get("timestamp")):
            print(f"[CACHE] Using cached sentiment for {tk}")
            results.append({**cached, "ticker": tk})
            continue

        print(f"[INFO] Fetching fresh data for {tk}")
        article_text = fetch_articles(tk)
        sentiment_score = predict_sentiment(article_text)
        timestamp = datetime.datetime.utcnow()

        cache_entry = {
            "article": article_text,
            "sentiment": sentiment_score,
            "timestamp": timestamp,
        }
        sentiment_cache[tk] = cache_entry
        results.append({**cache_entry, "ticker": tk})

    # sort so user ticker appears first, SPY second (already guaranteed by
    # the list order above; kept as a safety net)
    results.sort(key=lambda x: 0 if x["ticker"] == user_ticker else 1)
    return results
154
+
155
+
156
+ def display_sentiment(results):
157
+ html = "<h2>Sentiment Analysis</h2><ul>"
158
+ for r in results:
159
+ ts_str = r["timestamp"].strftime("%Y‑%m‑%d %H:%M:%S UTC")
160
+ html += (
161
+ f"<li><b>{r['ticker']}</b> &nbsp;({ts_str})<br>"
162
+ f"{r['article']}<br>"
163
+ f"<i>Sentiment score:</i> {r['sentiment']:.2f}" ")</li>"
164
+ )
165
+ html += "</ul>"
166
+ return html
 
 
 
 
 
 
 
 
 
 
167
 
168
  with gr.Blocks() as demo:
169
+ gr.Markdown("# Ticker vs. SPY Sentiment Tracker")
170
+ input_box = gr.Textbox(label="Enter any ticker symbol (e.g., AAPL)")
171
  output_html = gr.HTML()
172
+ run_btn = gr.Button("Analyze")
173
+
174
+ def _placeholder(t):
175
+ return f"<h3>Gathering latest articles for {t.upper()} and SPY please wait.</h3>"
176
+
177
+ run_btn.click(_placeholder, inputs=input_box, outputs=output_html, queue=False).then(
178
+ lambda t: display_sentiment(analyze_ticker(t)),
179
+ inputs=input_box,
180
+ outputs=output_html,
 
 
181
  )
182
 
183
  demo.launch()