mirror of
https://github.com/Death916/deathclock.git
synced 2026-04-10 03:04:40 -07:00
news
This commit is contained in:
parent
0b12a928f5
commit
2b09d76131
4 changed files with 138 additions and 66 deletions
|
|
@ -6,19 +6,19 @@ from time import localtime, strftime
|
|||
import socket
|
||||
import aiohttp
|
||||
|
||||
|
||||
def print_time():
    """Display the current local date and time, e.g. 'March 07, 02:15 PM'."""
    stamp = strftime("%B %d, %I:%M %p", localtime())
    print(stamp)
|
||||
|
||||
|
||||
class News:
|
||||
def __init__(self):
|
||||
self._news_dict = {}
|
||||
self._news_dict_length = 0
|
||||
socket.setdefaulttimeout(10) # Set default timeout for socket operations
|
||||
|
||||
socket.setdefaulttimeout(10) # Set default timeout for socket operations
|
||||
|
||||
async def _fetch_feed(self, session, feed):
|
||||
"""Fetches and parses a single feed asynchronously."""
|
||||
max_entries = 10 # Maximum number of entries to fetch from each feed
|
||||
|
||||
max_entries = 10 # Maximum number of entries to fetch from each feed
|
||||
|
||||
try:
|
||||
# Add timeout to the request
|
||||
timeout = aiohttp.ClientTimeout(total=5)
|
||||
|
|
@ -26,30 +26,36 @@ class News:
|
|||
if response.status != 200:
|
||||
print(f"Skip feed {feed}: status {response.status}")
|
||||
return []
|
||||
|
||||
|
||||
text = await response.text()
|
||||
d = feedparser.parse(text)
|
||||
|
||||
if hasattr(d, 'status') and d.status != 200:
|
||||
|
||||
if hasattr(d, "status") and d.status != 200:
|
||||
print(f"Skip feed {feed}: status {d.status}")
|
||||
return []
|
||||
|
||||
|
||||
feed_entries = []
|
||||
# Limit the number of entries parsed
|
||||
for i, post in enumerate(d.entries):
|
||||
if i >= max_entries:
|
||||
break # Stop parsing if we've reached the limit
|
||||
|
||||
feed_entries.append({
|
||||
'title': post.title,
|
||||
'source': d.feed.title if hasattr(d.feed, 'title') else 'Unknown',
|
||||
'publish_date': post.published if hasattr(post, 'published') else '',
|
||||
'summary': post.summary if hasattr(post, 'summary') else ''
|
||||
})
|
||||
|
||||
break # Stop parsing if we've reached the limit
|
||||
|
||||
feed_entries.append(
|
||||
{
|
||||
"title": post.title,
|
||||
"source": d.feed.title
|
||||
if hasattr(d.feed, "title")
|
||||
else "Unknown",
|
||||
"publish_date": post.published
|
||||
if hasattr(post, "published")
|
||||
else "",
|
||||
"summary": post.summary if hasattr(post, "summary") else "",
|
||||
}
|
||||
)
|
||||
|
||||
print(f"Added {len(feed_entries)} entries from {feed}")
|
||||
return feed_entries
|
||||
|
||||
|
||||
except aiohttp.ClientError as e:
|
||||
print(f"Error processing feed {feed}: {e}")
|
||||
return []
|
||||
|
|
@ -62,7 +68,7 @@ class News:
|
|||
feeds = []
|
||||
self._news_dict = {}
|
||||
self._news_dict_length = 0
|
||||
|
||||
|
||||
try:
|
||||
async with aiofiles.open("feeds.txt", "r") as f:
|
||||
async for line in f:
|
||||
|
|
@ -70,38 +76,37 @@ class News:
|
|||
except Exception as e:
|
||||
print(f"Error reading feeds.txt: {e}")
|
||||
return {}
|
||||
|
||||
|
||||
# Limit the number of feeds to process at once
|
||||
if len(feeds) > 10:
|
||||
feeds = random.sample(feeds, 10)
|
||||
|
||||
|
||||
print("Getting news entries...")
|
||||
timeout = aiohttp.ClientTimeout(total=15)
|
||||
async with aiohttp.ClientSession(timeout=timeout) as session:
|
||||
tasks = [self._fetch_feed(session, feed) for feed in feeds]
|
||||
all_feed_entries_list = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
|
||||
all_entries = []
|
||||
for result in all_feed_entries_list:
|
||||
if isinstance(result, list) and result:
|
||||
all_entries.extend(result)
|
||||
|
||||
|
||||
if not all_entries:
|
||||
print("No entries collected")
|
||||
return {}
|
||||
|
||||
return []
|
||||
|
||||
if len(all_entries) > 30:
|
||||
all_entries = random.sample(all_entries, 30)
|
||||
|
||||
for entry in all_entries:
|
||||
self._news_dict[entry['title']] = entry
|
||||
|
||||
|
||||
try:
|
||||
async with aiofiles.open("news.txt", "w") as f:
|
||||
print("Writing news to file...")
|
||||
for entry in self._news_dict.values():
|
||||
await f.write(f"[{entry['publish_date']}] {entry['source']}: {entry['title']}\n")
|
||||
for entry in all_entries:
|
||||
await f.write(
|
||||
f"[{entry['publish_date']}] {entry['source']}: {entry['title']}\n"
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"Error writing to news.txt: {e}")
|
||||
|
||||
return self._news_dict
|
||||
|
||||
return all_entries
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue