onecatper.day/rss_update.py
zepp 129e5bfd57 Squashing bugs I created and didn't test for :(
Moving the constant to two files caused the time
drift I warned myself about but forgot. I shaved
the seconds off of the constant as well since if
it takes an hour between downloading the image and
updating the feed and website, something out of my
control is going on
2024-10-28 08:51:33 -04:00

92 lines
3.1 KiB
Python
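
A minimal sketch of the drift the commit message describes, assuming the companion script that downloads the image (not shown on this page; the file name and variable names below are made up for illustration) builds its own copy of the timestamp constant:

    import datetime

    # download_cat.py (hypothetical): computes its own timestamp at import time
    DOWNLOAD_STAMP = str(datetime.datetime.today()).replace(' ', "-")
    # rss_update.py: computes the constant again a moment later
    FEED_STAMP = str(datetime.datetime.today()).replace(' ', "-")

    # with microseconds kept, the two strings almost never match, so the saved
    # image cat{DOWNLOAD_STAMP}.jpg and the feed's cat{FEED_STAMP}.jpg drift apart
    print(DOWNLOAD_STAMP == FEED_STAMP)              # almost always False
    # shaving microseconds and seconds ([:-10]) leaves minute precision, which
    # only disagrees when the two runs straddle a minute boundary
    print(DOWNLOAD_STAMP[:-10] == FEED_STAMP[:-10])  # True in practice

Hence the truncated DATETIME_STR below: even if the timestamp is computed more than once, the copies only have to agree on the minute, not the exact second.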

from random import randint
import datetime
# this will change between calls due to microseconds
# better for it just to be a global
DATETIME_STR = str(datetime.datetime.today()).replace(' ', "-")
# we don't need those pesky microseconds (or the seconds) anyway
DATETIME_STR = DATETIME_STR[:-10]
# reference: https://cyber.harvard.edu/rss/rss.html#ltguidgtSubelementOfLtitemgt
# also updates pubDate and lastBuildDate
# finally, updates the image name as well
def update_rss_feed(rss_file_name):
    rss_file = ""
    with open(rss_file_name, "r") as f:
        rss_file = f.read()
    element_list = rss_file.split('\n')
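    # collect every line we may need to rewrite: link, guid, and enclosure
    # elements plus the lastBuildDate and pubDate lines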
    items = []
    for el in element_list:
        if "link" in el:
            items.append(el)
        if "guid" in el:
            items.append(el)
        if "enclosure" in el:
            items.append(el)
        if "<lastBuildDate>" in el:
            items.append(el)
        if "<pubDate>" in el:
            items.append(el)
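    # find the permalink-style lines (https://onecatper.day#<digits>) and note
    # where the '#' sits in each, so we know where the digit run starts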
    guid_items = []
    for el in items:
        if "https://onecatper.day#" in el:
            guid_items.append(el)
    hash_located_at = []
    for el in guid_items:
        hash_located_at.append(el.index('#'))
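    # overwrite every digit after the '#' with a fresh random one so the items
    # read as new; stop 7 characters early so the closing tag (e.g. "</guid>")
    # is left intact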
    CLOSING_TAG_LENGTH = 7
    as_char_list = list(items[2])
    for i in range(hash_located_at[0] + 1, len(as_char_list) - CLOSING_TAG_LENGTH):
        as_char_list[i] = str(randint(0, 9))
    new_guid_link_one = "".join(as_char_list)
    as_char_list = list(items[4])
    for i in range(hash_located_at[1] + 1, len(as_char_list) - CLOSING_TAG_LENGTH):
        as_char_list[i] = str(randint(0, 9))
    new_guid_link_two = "".join(as_char_list)
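    # keep the original permalink lines around so they can be swapped for the
    # freshly randomized ones below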
    split_file = rss_file.split("\n")
    old_guid_links = []
    for element in split_file:
        if "https://onecatper.day#" in element:
            old_guid_links.append(element)
    # prepping strings for replacement of the old strings
    # cat image stored in img/
    old_img_link = items[-1].split(" ")
    old_cat_image = old_img_link[-1].split("\"")
    old_cat_image_url = old_cat_image[1]
    new_cat_image_url = f"https://onecatper.day/img/cat{DATETIME_STR}.jpg"
    # rss.xml stores dates and times that need to be updated for the feed to refresh
    old_last_build_date = items[1]
    old_pub_date = items[3]
    # the date and time need to be in the specific format RSS expects,
    # so the DATETIME_STR constant can't be reused here
    rss_date = datetime.datetime.strftime(datetime.datetime.now(), "%a, %d %b %Y %H:%M:%S -0500")
    # create new date/time lines for the feed
    new_last_build_date = f"\t\t<lastBuildDate>{rss_date}</lastBuildDate>"
    new_pub_date = f"\t\t\t<pubDate>{rss_date}</pubDate>"
    # replace what is necessary to make the feed update
    rss_file = rss_file.replace(old_guid_links[0], new_guid_link_one)
    rss_file = rss_file.replace(old_guid_links[1], new_guid_link_two)
    rss_file = rss_file.replace(old_cat_image_url, new_cat_image_url)
    rss_file = rss_file.replace(old_last_build_date, new_last_build_date)
    rss_file = rss_file.replace(old_pub_date, new_pub_date)
    # write the changes to disk
    with open(rss_file_name, "w") as f:
        f.write(rss_file)


update_rss_feed("rss.xml")