Back up Rakuten Mobile's coverage area status

# Scraping

import datetime

import requests
from bs4 import BeautifulSoup

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
}


def fetch_soup(url, parser="html.parser"):
    """Fetch a URL and return it parsed as a BeautifulSoup object."""

    # A timeout keeps the request from hanging indefinitely
    r = requests.get(url, headers=headers, timeout=30)
    r.raise_for_status()

    return BeautifulSoup(r.content, parser)


url = "https://network.mobile.rakuten.co.jp/area/"

soup = fetch_soup(url)

# The page shows the last-updated date as "YYYY年MM月DD日更新"
text = soup.select_one("div.area-Top_Map > p").get_text(strip=True)

dt_update = datetime.datetime.strptime(text, "%Y年%m月%d日更新").date()

# Tag the output filenames with the update date as YYYYMMDD
update = dt_update.strftime("%Y%m%d")

print(update)
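
If the wording around the date ever changes, the strptime call above will raise a ValueError. A minimal fallback sketch that extracts just the date with a regular expression (the exact surrounding text is an assumption):

import re

# Hypothetical fallback: pull "YYYY年MM月DD日" out of the text regardless of
# what precedes or follows it
m = re.search(r"(\d{4})年(\d{1,2})月(\d{1,2})日", text)
if m:
    dt_update = datetime.date(*map(int, m.groups()))
    update = dt_update.strftime("%Y%m%d")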

# Map

from staticmap import StaticMap

# Tile URL templates for the two published coverage-map variants
# (the 4g2m and 4g4m endpoints); staticmap fills in {x}, {y} and {z}
rakuten2m = "https://gateway-api.global.rakuten.com/dsd/geoserver/4g2m/mno_coverage_map/gwc/service/gmaps?LAYERS=mno_coverage_map:all_map&FORMAT=image/png&TRANSPARENT=TRUE&x={x}&y={y}&zoom={z}"
rakuten4m = "https://gateway-api.global.rakuten.com/dsd/geoserver/4g4m/mno_coverage_map/gwc/service/gmaps?LAYERS=mno_coverage_map:all_map&FORMAT=image/png&TRANSPARENT=TRUE&x={x}&y={y}&zoom={z}"
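
For reference, staticmap substitutes the {x}, {y} and {z} placeholders with tile coordinates when it downloads each tile, roughly like this (the tile numbers here are illustrative):

# e.g. one tile request URL around Ehime at zoom 12
print(rakuten2m.format(x=3561, y=1636, z=12))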

# Map centers to capture (the Tōyo, Chūyo and Nanyo regions of Ehime Prefecture)
cities = [
    {"name": "東予", "lat": 34.024779, "lng": 133.183823},
    {"name": "中予", "lat": 33.673497, "lng": 132.702484},
    {"name": "南予1", "lat": 33.493021, "lng": 132.521553},
    {"name": "南予2", "lat": 33.170318, "lng": 132.524815},
]

# Output image size in pixels and tile zoom level
width, height = 3000, 2000
zoom = 12

for city in cities:

    # Render both coverage-map variants; the filename suffix (m2 / m4) tells them apart
    for i, url in enumerate([rakuten2m, rakuten4m], 1):

        smap = StaticMap(width, height, url_template=url)

        # staticmap takes the center as [longitude, latitude]
        img = smap.render(zoom=zoom, center=[city["lng"], city["lat"]])
        img.save(f'{update}_{city["name"]}_m{i*2}.png')
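
Since the goal is to keep one backup per published update, a small guard can skip maps already saved for the current update date. A sketch, assuming the same filename scheme as above:

from pathlib import Path

for city in cities:
    for i, url in enumerate([rakuten2m, rakuten4m], 1):
        out = Path(f'{update}_{city["name"]}_m{i*2}.png')
        if out.exists():
            # Already backed up for this update; skip the slow render
            continue
        smap = StaticMap(width, height, url_template=url)
        img = smap.render(zoom=zoom, center=[city["lng"], city["lat"]])
        img.save(str(out))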