#!/usr/bin/env python3
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import bs4
import feedgenerator
from datetime import datetime, timedelta
from fastapi import FastAPI, Response
app = FastAPI()


@app.get("/")
async def root():
    """Serve an RSS 2.0 feed of the last 7 days of Dilbert comic strips.

    Scrapes each day's strip page on dilbert.com (routed through a local
    Tor SOCKS proxy on 127.0.0.1:9050) for its OpenGraph metadata and
    returns the assembled feed as an XML response. Days whose page lacks
    the expected metadata are silently skipped.
    """
    feed = feedgenerator.Rss201rev2Feed(
        title="Dilbert Comic Strip",
        description="New 'bert",
        link="https://dilbert.techchud.xyz/",
    )

    # Build the session ONCE, not per day (the original recreated the
    # session, retry policy, adapters, and proxy config on every loop pass).
    session = requests.Session()
    retry = Retry(connect=10, backoff_factor=0.5)
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    session.proxies = {
        'http': 'socks5://127.0.0.1:9050',
        'https': 'socks5://127.0.0.1:9050',
    }

    for day in range(7):
        raw_date = datetime.today() - timedelta(days=day)
        date = raw_date.strftime('%Y-%m-%d')
        url = "https://dilbert.com/strip/" + date

        # NOTE(review): requests is blocking; inside an `async def` handler
        # this stalls the event loop for every fetch. Consider a plain `def`
        # handler (FastAPI runs those in a threadpool) or an async HTTP client.
        search = session.get(url)
        soup = bs4.BeautifulSoup(search.text, features="lxml")
        try:
            comic_title = soup.find("meta", property="og:title")["content"]
            comic_url = soup.find("meta", property="og:image")["content"]
            comic_author = soup.find("meta", property="article:author")["content"]
        except (TypeError, KeyError):
            # find() returned None (page missing) or the tag lacked
            # "content" — skip this day. Narrowed from a bare `except:`.
            continue

        feed.add_item(
            title=comic_title,
            description=comic_url,
            author_name=comic_author,
            link=url,
            # Bug fix: the original passed `id=id` — the *builtin function*
            # `id` — as the item identifier. Use the strip URL as the GUID.
            unique_id=url,
            pubdate=raw_date,
        )

    return Response(content=feed.writeString("utf-8"), media_type="application/xml")