Mirror of https://github.com/Death916/deathclock.git, synced 2026-04-10 03:04:40 -07:00
async news
parent 2243c59898
commit 079d4451c8
6 changed files with 424 additions and 60 deletions
Binary file not shown.
@@ -1,6 +1,6 @@
 import datetime
 from dash import html, dcc, Input, Output, State
-import alarm # Import Alarm class
+import alarm
 
 class AlarmModule:
     def __init__(self, app):
@@ -1,7 +1,10 @@
 import feedparser
-from time import localtime, strftime
+import asyncio
+import aiofiles
 import random
+from time import localtime, strftime
 import socket
+import aiohttp
 
 def print_time():
     print(strftime("%B %d, %I:%M %p", localtime()))
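feedparser has no async interface of its own, which is why aiohttp appears alongside it here: the feed body is downloaded asynchronously and the resulting text is handed to feedparser.parse(), which accepts a string as well as a URL. A minimal sketch of that pattern (standalone, not code from this commit):

import aiohttp
import feedparser

async def fetch_and_parse(url):
    # Download the raw feed with aiohttp, then parse the text with feedparser.
    # feedparser.parse() itself stays synchronous; only the network I/O is awaited.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            text = await response.text()
    return feedparser.parse(text)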
@@ -10,72 +13,85 @@ class News:
     def __init__(self):
         self._news_dict = {}
         self._news_dict_length = 0
-        # Set timeout for feed fetching
-        socket.setdefaulttimeout(10)
-
-    def get_news(self):
-        print_time()
-        feeds = []
-        self._news_dict = {}
-        self._news_dict_length = 0
-
         socket.setdefaulttimeout(10) # Set default timeout for socket operations
 
+    async def _fetch_feed(self, session, feed):
+        """Fetches and parses a single feed asynchronously."""
+        max_entries = 10 # Maximum number of entries to fetch from each feed
         try:
-            with open("feeds.txt", "r") as f:
-                feeds = [line.strip() for line in f]
-        except Exception as e:
-            print(f"Error reading feeds.txt: {e}")
-            return {}
-
-        all_entries = []
-        print("Getting news entries...")
-
-        for feed in feeds:
-            try:
-                feed_entries = []
-                print(f"Fetching from feed: {feed}") # Debug print
-                d = feedparser.parse(feed)
 
+            async with session.get(feed) as response:
+                if response.status != 200:
+                    print(f"Skip feed {feed}: status {response.status}")
+                    return []
+                text = await response.text()
+                d = feedparser.parse(text)
 
             if hasattr(d, 'status') and d.status != 200:
                 print(f"Skip feed {feed}: status {d.status}")
-                continue
-
-                for post in d.entries:
+                return []
+
+            feed_entries = []
+            # Limit the number of entries parsed
+            for i, post in enumerate(d.entries):
+                if i >= max_entries:
+                    break # Stop parsing if we've reached the limit
                 feed_entries.append({
                     'title': post.title,
                     'source': d.feed.title if hasattr(d.feed, 'title') else 'Unknown',
                     'publish_date': post.published if hasattr(post, 'published') else '',
                     'summary': post.summary if hasattr(post, 'summary') else ''
                 })
 
-                if feed_entries:
-                    selected = random.sample(feed_entries, min(10, len(feed_entries)))
-                    all_entries.extend(selected)
-                    print(f"Added {len(selected)} entries from {feed}") # Debug print
-
-                if len(all_entries) >= 30:
-                    break
-
-            except Exception as e:
-                print(f"Error processing feed {feed}: {e}")
-                continue
 
+            print(f"Added {len(feed_entries)} entries from {feed}")
+            return feed_entries
+
+        except aiohttp.ClientError as e:
+            print(f"Error processing feed {feed}: {e}")
+            return []
+        except Exception as e:
+            print(f"Error processing feed {feed}: {e}")
+            return []
+
+    async def get_news(self):
+        print_time()
+        feeds = []
+        self._news_dict = {}
+        self._news_dict_length = 0
+
+        try:
+            async with aiofiles.open("feeds.txt", "r") as f:
+                async for line in f:
+                    feeds.append(line.strip())
+        except Exception as e:
+            print(f"Error reading feeds.txt: {e}")
+            return {}
+
+        print("Getting news entries...")
+        async with aiohttp.ClientSession() as session:
+            tasks = [self._fetch_feed(session, feed) for feed in feeds]
+            all_feed_entries_list = await asyncio.gather(*tasks)
+
+        all_entries = []
+        for feed_entries in all_feed_entries_list:
+            if feed_entries:
+                #Now just add the entries, because we are already limited
+                all_entries.extend(feed_entries)
+
+        if not all_entries:
+            print("No entries collected")
+            return {}
 
         if len(all_entries) > 30:
             all_entries = random.sample(all_entries, 30)
 
         for entry in all_entries:
             self._news_dict[entry['title']] = entry
 
         try:
-            with open("news.txt", "w") as f:
+            async with aiofiles.open("news.txt", "w") as f:
                 print("Writing news to file...")
                 for entry in self._news_dict.values():
-                    f.write(f"[{entry['publish_date']}] {entry['source']}: {entry['title']}\n")
-                    f.flush()
+                    await f.write(f"[{entry['publish_date']}] {entry['source']}: {entry['title']}\n")
         except Exception as e:
             print(f"Error writing to news.txt: {e}")
 
         return self._news_dict
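Since get_news() is now a coroutine, callers have to drive it from an event loop. A minimal way to exercise the new method from a standalone script might look like the sketch below (it assumes a feeds.txt in the working directory, as the code above does):

import asyncio
from news import News

async def main():
    news = News()
    headlines = await news.get_news()  # dict keyed by headline title
    for title, entry in headlines.items():
        print(f"{entry['source']}: {title}")

if __name__ == "__main__":
    asyncio.run(main())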
@@ -1,47 +1,54 @@
 import datetime
-from dash import html, Input, Output
-from news import News # Import News class
+import asyncio
+from dash import html, Input, Output, callback, no_update
+from dash.exceptions import PreventUpdate
+from news import News
 
 class NewsModule:
     def __init__(self, app):
         self.app = app
         self.news_obj = self.get_news_object()
         self._last_news_update = datetime.datetime(2000, 1, 1)
-        self._cached_news = []
+        self._cached_news = self.create_loading_message() # Initial loading message
+        self._initial_run = True
         self.setup_callbacks()
 
     def get_news_object(self):
         return News()
 
+    def create_loading_message(self):
+        return html.Div("Loading...")
+
     def setup_callbacks(self):
         @self.app.callback(
             Output('news-ticker', 'children'),
             Input('news-interval', 'n_intervals')
         )
         def update_news(n):
             if n is None:
                 return self._cached_news
 
             current_time = datetime.datetime.now()
             try:
                 print("UPDATING NEWS...")
-                headlines_dict = self.news_obj.get_news()
-                combined_items = " | ".join([f"{data['source']}: {headline}"
+                # Execute the async function with asyncio.run
+                headlines_dict = asyncio.run(self.news_obj.get_news())
+
+                combined_items = " | ".join([f"{data['source']}: {headline}"
                                              for headline, data in headlines_dict.items()])
 
                 text_px = len(combined_items) * 8
                 scroll_speed = 75
                 duration = max(text_px / scroll_speed, 20)
 
                 ticker_style = {"animationDuration": f"{duration}s"}
 
                 self._cached_news = html.Div(
                     html.Span(combined_items, className="news-item", style=ticker_style),
                     className='ticker'
                 )
 
                 self._last_news_update = current_time
+                self._initial_run = False
                 return self._cached_news
 
             except Exception as e:
+                if self._cached_news:
+                    return self._cached_news
                 print(f"Error updating news: {e}")
                 return html.Div("No news available.")
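Calling asyncio.run() inside the synchronous Dash callback starts a fresh event loop on every interval tick and blocks the callback until asyncio.gather() has finished with every feed; it also raises RuntimeError if the calling thread already has a running loop. That is fine for a worker-thread Flask/Dash server, but if it ever becomes a problem, one alternative (a sketch only, not part of this commit; run_async is a hypothetical helper) is to keep a dedicated loop and reuse it:

import asyncio

_loop = asyncio.new_event_loop()  # module-level loop, reused across callback calls

def run_async(coro):
    # Drive a coroutine to completion on the dedicated loop.
    # Assumes callbacks are not executed concurrently from several threads;
    # otherwise guard this with a lock or use one loop per thread.
    return _loop.run_until_complete(coro)

# Inside update_news, this would replace asyncio.run(...):
# headlines_dict = run_async(self.news_obj.get_news())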