#!/usr/bin/env python3
"""Serve the last week of Dilbert strips as an RSS feed over FastAPI."""

from datetime import datetime, timedelta

import bs4
import feedgenerator
import requests
from fastapi import FastAPI, Response
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

app = FastAPI()


@app.get("/")
async def root():
    sneed = feedgenerator.Rss201rev2Feed(
        title="Dilbert Comic Strip",
        description="New 'bert",
        link="https://dilbert.techchud.xyz/",
    )

    # One session for all requests: retry flaky connections and route
    # everything through the local Tor SOCKS proxy on 127.0.0.1:9050.
    session = requests.Session()
    retry = Retry(connect=10, backoff_factor=0.5)
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    session.proxies = {
        "http": "socks5://127.0.0.1:9050",
        "https": "socks5://127.0.0.1:9050",
    }

    # Fetch the strip page for each of the last seven days and pull the
    # comic metadata out of its Open Graph / article meta tags.
    for day in range(7):
        rawDate = datetime.today() - timedelta(days=day)
        date = rawDate.strftime("%Y-%m-%d")
        url = "https://dilbert.com/strip/" + date

        search = session.get(url)
        soup = bs4.BeautifulSoup(search.text, features="lxml")
        comicTitle = soup.find("meta", property="og:title")["content"]
        comicURL = soup.find("meta", property="og:image")["content"]
        comicAuthor = soup.find("meta", property="article:author")["content"]

        sneed.add_item(
            title=comicTitle,
            description=comicURL,
            author_name=comicAuthor,
            link=url,
            unique_id=url,
        )

    feed = sneed.writeString("utf-8")
    return Response(content=feed, media_type="application/xml")
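
# Usage sketch (assumptions: the file is saved as main.py and a Tor SOCKS
# proxy is already listening on 127.0.0.1:9050; adjust names and ports to
# match your setup):
#
#   pip install fastapi uvicorn "requests[socks]" beautifulsoup4 lxml feedgenerator
#   uvicorn main:app --host 127.0.0.1 --port 8000
#
# The feed is then served at http://127.0.0.1:8000/ and can be polled by any
# RSS reader. Note that requests needs the "socks" extra (PySocks) for the
# socks5:// proxy URLs above to work.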