add logging and cli arguments
This commit is contained in:
parent
1befc62b6c
commit
04280d1b9b
65
rss.py
65
rss.py
|
@ -1,11 +1,19 @@
|
||||||
import os
|
import os
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
import requests
|
import requests
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
# Module-level logger. Configured with a stream handler here so the
# script emits timestamped INFO output when run directly.
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Guard against registering a second handler (and duplicating every
# message) if this module is imported more than once.
if not LOGGER.handlers:
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    LOGGER.addHandler(ch)
|
||||||
|
|
||||||
|
|
||||||
def tag(tagname: str) -> str:
|
def tag(tagname: str) -> str:
|
||||||
|
@ -21,46 +29,45 @@ class Entry:
|
||||||
self.date = datetime.fromisoformat(entry.find(tag('published')).text)
|
self.date = datetime.fromisoformat(entry.find(tag('published')).text)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f'{self.title} by {self.author} at {self.date} \n {self.url}'
|
return f'New post by {self.author}: {self.title}\n{self.url}'
|
||||||
|
|
||||||
|
|
||||||
def get_last_update_time(file: str) -> datetime:
    """Return the last-modified time of *file* as a timezone-aware datetime.

    Falls back to the current UTC time when *file* does not exist yet,
    so that on a first run no backlog of feed entries is re-announced.
    """
    try:
        stat = os.stat(file)
        # st_mtime is a POSIX timestamp; attach the local timezone so the
        # result compares safely against other aware datetimes.
        return datetime.fromtimestamp(stat.st_mtime,
                                      datetime.now().astimezone().tzinfo)
    except FileNotFoundError:
        return datetime.now(timezone.utc)
|
||||||
|
|
||||||
|
|
||||||
def update_rss_file(file, feed):
    """Download the Atom feed at *feed* and stream it into *file*.

    Overwrites any previous copy; the file's mtime afterwards records
    when the feed was last fetched (see get_last_update_time).
    """
    # timeout: a hung server must not stall the script forever.
    r = requests.get(feed, stream=True, timeout=30)
    # Fail loudly on HTTP errors instead of saving an error page as a feed.
    r.raise_for_status()
    with open(file, 'w') as fd:
        for chunk in r.iter_content(chunk_size=1024, decode_unicode=True):
            fd.write(chunk)
|
||||||
|
|
||||||
|
|
||||||
def parse_rss_file(file) -> ET.Element:
    """Parse the downloaded Atom file and return its root element."""
    return ET.parse(file).getroot()
|
||||||
|
|
||||||
|
|
||||||
def get_new_posts(time, root) -> list[Entry]:
    """Return the feed entries under *root* published after *time*.

    *time* must be timezone-aware so it compares cleanly against
    Entry.date (parsed from the feed's 'published' element).
    """
    return [entry for entry in (Entry(node) for node in root.findall(tag('entry')))
            if entry.date > time]
|
||||||
|
|
||||||
|
|
||||||
def send_message(entry, url) -> int:
    """POST *entry* (rendered via str) to the Discord webhook at *url*.

    Returns the HTTP status code so the caller can log failures.
    """
    payload = {
        'content': str(entry)
    }
    # timeout keeps an unresponsive webhook from hanging the whole run
    r = requests.post(url, payload, timeout=10)
    return r.status_code
|
||||||
|
|
||||||
# Check last mod date of current rss file
|
# Check last mod date of current rss file
|
||||||
# Download new version of file
|
# Download new version of file
|
||||||
|
@ -69,11 +76,25 @@ def send_message(entry):
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        prog='discussfeed',
        description='Checks for new entries from an atom feed and sends them to a discord webhook')
    # required: without a webhook URL, send_message would POST to None.
    parser.add_argument('-w', '--webhook', required=True)
    parser.add_argument('feed_url')
    args = parser.parse_args()

    # Cache the feed locally under its basename; the cached file's mtime
    # doubles as the "last checked" marker.
    filename = os.path.basename(args.feed_url)
    last_updated = get_last_update_time(filename)
    LOGGER.info('last updated: %s', last_updated)

    update_rss_file(filename, args.feed_url)
    root = parse_rss_file(filename)
    posts = get_new_posts(last_updated, root)
    LOGGER.info('found %d new posts', len(posts))

    for post in posts:
        status = send_message(post, args.webhook)
        if status >= 300:
            LOGGER.error('Response code %s', status)
        else:
            LOGGER.info('response code %s', status)
|
||||||
|
|
Loading…
Reference in New Issue