#!/usr/bin/env python3
import time

import bs4
import feedgenerator
import requests
from datetime import datetime, timedelta
from fastapi import FastAPI, Response

app = FastAPI()


@app.get("/")
def root():
    # Plain (non-async) handler: requests and time.sleep are blocking calls,
    # so FastAPI runs this in a threadpool instead of stalling the event loop.
    sneed = feedgenerator.Rss201rev2Feed(
        title="Dilbert Comic Strip",
        description="New 'bert",
        link="https://dilbert.techchud.xyz/",
    )

    # One feed item per day for the past week.
    for day in range(0, 7):
        rawDate = datetime.today() - timedelta(days=day)
        date = rawDate.strftime("%Y-%m-%d")
        url = "https://dilbert.com/strip/" + date

        # Retry on 503 (rate limiting / temporary outage), up to `tries` attempts.
        tries = 25
        for i in range(tries):
            search = requests.get(url)
            if search.status_code == 503:
                if i < tries - 1:
                    time.sleep(1)
                    continue
                raise RuntimeError(f"Gave up after {tries} attempts fetching {url}")
            break

        # Pull the strip's title, image URL, and author out of the page's meta tags.
        soup = bs4.BeautifulSoup(search.text, features="lxml")
        comicTitle = soup.find("meta", property="og:title")["content"]
        comicURL = soup.find("meta", property="og:image")["content"]
        comicAuthor = soup.find("meta", property="article:author")["content"]

        sneed.add_item(
            title=comicTitle,
            description=comicURL,
            author_name=comicAuthor,
            link=url,
            unique_id=url,
        )

    feed = sneed.writeString("utf-8")
    return Response(content=feed, media_type="application/xml")
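
# Usage sketch (assumptions: the script is saved as main.py and served with uvicorn;
# both the filename and the port below are illustrative, not part of the original script):
#
#   pip install fastapi uvicorn requests beautifulsoup4 lxml feedgenerator
#   uvicorn main:app --host 0.0.0.0 --port 8000
#
# The RSS feed is then available at http://localhost:8000/ and can be added to a reader.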