# 愛媛県の食中毒の発生情報をスクレイピング
# (Scrape food-poisoning outbreak reports from the Ehime Prefecture website.)

from urllib.parse import urljoin

import pandas as pd
import requests
from bs4 import BeautifulSoup

url = "https://www.pref.ehime.jp/h25300/4793/shokuchuudoku/hassei.html"

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
}

# requests has no default timeout; without one a stalled server hangs the
# script forever.
r = requests.get(url, headers=headers, timeout=30)
r.raise_for_status()
soup = BeautifulSoup(r.content, "html.parser")

data = []

# First <tr> is the table header — skip it and parse each outbreak row.
for tr in soup.select("table.datatable > tbody > tr")[1:]:
    row = {}

    tds = tr.select("td")

    # Column 0: date of occurrence.
    row["発生年月日"] = tds[0].get_text(strip=True)

    # Column 1: a series of <p> elements of the form "label:value",
    # with the final <p> holding the press-release link.
    ps = tds[1].select("p")

    for p in ps[:-1]:
        # Normalize a full-width colon (common on Japanese government
        # pages) to ASCII so the label/value split never raises.
        text = p.get_text(strip=True).replace("：", ":", 1)
        label, value = text.split(":", 1)
        row[label] = value

    # Guard against a last <p> without an anchor, and resolve the
    # relative href against the page URL so the TSV holds usable links.
    anchor = ps[-1].a
    row["プレスリリース"] = urljoin(url, anchor.get("href")) if anchor else None

    data.append(row)


df = pd.DataFrame(data)

# index=False: the auto-generated row index is noise in the output TSV.
df.to_csv("data.tsv", sep="\t", index=False)