Merge branch 'modules' of https://github.com/death916/deathclock into modules

This commit is contained in:
Death916 2025-03-08 04:52:30 -08:00
commit 765a02e8d4
4 changed files with 59 additions and 36 deletions

View file

@@ -22,7 +22,9 @@ def create_app():
html.Div(id='weather-display')
], id='scores-weather-container'),
]),
html.Div(id='news-ticker'),
dcc.Interval(id='clock-interval', interval=60000, n_intervals=0),
dcc.Interval(id='weather-interval', interval=150000, n_intervals=0),
dcc.Interval(id='news-interval', interval=300000, n_intervals=0),
@@ -41,7 +43,6 @@ def create_app():
print("ALARM TRIGGERED!")
check_alarms()
return app
if __name__ == '__main__':

View file

@@ -18,11 +18,15 @@ class News:
async def _fetch_feed(self, session, feed):
"""Fetches and parses a single feed asynchronously."""
max_entries = 10 # Maximum number of entries to fetch from each feed
try:
async with session.get(feed) as response:
# Add timeout to the request
timeout = aiohttp.ClientTimeout(total=5)
async with session.get(feed, timeout=timeout) as response:
if response.status != 200:
print(f"Skip feed {feed}: status {response.status}")
return []
text = await response.text()
d = feedparser.parse(text)
@@ -35,12 +39,14 @@ class News:
for i, post in enumerate(d.entries):
if i >= max_entries:
break # Stop parsing if we've reached the limit
feed_entries.append({
'title': post.title,
'source': d.feed.title if hasattr(d.feed, 'title') else 'Unknown',
'publish_date': post.published if hasattr(post, 'published') else '',
'summary': post.summary if hasattr(post, 'summary') else ''
})
print(f"Added {len(feed_entries)} entries from {feed}")
return feed_entries
@@ -65,16 +71,20 @@ class News:
print(f"Error reading feeds.txt: {e}")
return {}
# Limit the number of feeds to process at once
if len(feeds) > 10:
feeds = random.sample(feeds, 10)
print("Getting news entries...")
async with aiohttp.ClientSession() as session:
timeout = aiohttp.ClientTimeout(total=15)
async with aiohttp.ClientSession(timeout=timeout) as session:
tasks = [self._fetch_feed(session, feed) for feed in feeds]
all_feed_entries_list = await asyncio.gather(*tasks)
all_feed_entries_list = await asyncio.gather(*tasks, return_exceptions=True)
all_entries = []
for feed_entries in all_feed_entries_list:
if feed_entries:
#Now just add the entries, because we are already limited
all_entries.extend(feed_entries)
for result in all_feed_entries_list:
if isinstance(result, list) and result:
all_entries.extend(result)
if not all_entries:
print("No entries collected")

View file

@@ -29,10 +29,22 @@ class NewsModule:
return self._cached_news
current_time = datetime.datetime.now()
time_since_update = (current_time - self._last_news_update).total_seconds()
# Only update if it's been more than 5 minutes or it's the initial run
if time_since_update < 300 and not self._initial_run:
return self._cached_news
try:
print("UPDATING NEWS...")
# Execute the async function with asyncio.run
headlines_dict = asyncio.run(self.news_obj.get_news())
# Create a new event loop for this request
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
headlines_dict = loop.run_until_complete(self.news_obj.get_news())
loop.close()
if not headlines_dict:
return html.Div("No news available at this time.", className="ticker")
combined_items = " | ".join([f"{data['source']}: {headline}"
for headline, data in headlines_dict.items()])