path: root/kadomatsu-rss.py
blob: 47a7e6893b83b1c21893e2889ab41a083a9a35fe

#!/usr/bin/env python3
import os
import threading
from datetime import datetime
from urllib.parse import urljoin

import feedgenerator
import requests
from bs4 import BeautifulSoup


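# Base URL of the INFORMATION section; relative links scraped from the site
# are resolved against it.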
root = "https://www.toshiki-kadomatsu.jp/information/"

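# Single module-level feed that the worker threads append items to. CPython's
# GIL makes the underlying list append effectively atomic, but item order in
# the output depends on which thread finishes first.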
feed = feedgenerator.Rss201rev2Feed(
    title="角松敏生 OFFICIAL SITE",
    description="角松敏生 OFFICIAL SITE - INFORMATION section",
    link=root,
    language="ja",
)

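# Shared HTTP session with an identifying User-Agent for every request.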
s = requests.Session()

s.headers.update({'User-Agent': 'kadomatsu-rss https://427738.xyz/kadomatsu-rss.html'})

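# Fetch a page, force UTF-8 decoding, and parse it with lxml.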
def get_soup(url):
    site = s.get(url)
    site.encoding = "utf-8"
    soup = BeautifulSoup(site.text, "lxml")
    return soup

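# Resolve a relative URL against the INFORMATION root.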
def relative2absolute(rel):
    return urljoin(root, rel)

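# Rewrite relative href/src attributes to absolute URLs so links and images
# still resolve when the article is embedded in a feed reader.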
def format_article(soup):
    for i in soup.find_all("a"):
        i["href"] = relative2absolute(i["href"])
    for i in soup.find_all("img"):
        i["src"] = relative2absolute(i["src"])
    return soup

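# Download an article page and return its body (the <li id="Detail"> element)
# with links rewritten to absolute URLs.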
def get_article(url):
    soup = get_soup(url)
    article = soup.find("li", id="Detail")
    return format_article(article)

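# Build one feed item from an index entry: title and date come from the
# listing, the description from the linked article page.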
def add_article(i):
    title = list(i.find("a").strings)[0]
    date = datetime.strptime(i.find("time")["datetime"], "%Y-%m-%d")
    content_url = relative2absolute(i.find("a")["href"])
    content = get_article(content_url)
    feed.add_item(
        title=title,
        pubdate=date,
        link=content_url,
        description=str(content),  # serialize the BeautifulSoup Tag to HTML text
    )

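# Walk the <ul id="List"> index and fetch every article in its own thread to
# overlap the network round-trips.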
def make_feed(soup):
    items = soup.find("ul", id="List")
    threads = []
    for i in items.find_all("dl"):
        thread = threading.Thread(target=add_article, args=(i,))
        threads.append(thread)
        thread.start()
    for thread in threads:
        thread.join()

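# CGI entry point: scrape the full index, print the RSS document with its
# Content-Type header, and optionally write a copy to KADOMATSU_RSS_FILE for
# static serving.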
if __name__ == "__main__":
    make_feed(get_soup(root + "?select=all"))

    print("Content-Type: application/rss+xml; charset=UTF-8")
    print()
    print(feed.writeString("utf-8"))

    public_file_path = os.getenv("KADOMATSU_RSS_FILE")
    if public_file_path is not None:
        with open(public_file_path, "w", encoding="utf-8") as public_file:
            feed.write(public_file, "utf-8")