"""Scrape the ahamo supported-phones page and save the device names to CSV."""

import requests
from bs4 import BeautifulSoup
import pandas as pd

# Plain desktop UA so the site serves the regular HTML page.
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
}


def fetch_soup(url, parser="html.parser"):
    """Fetch *url* and return the parsed document.

    Parameters
    ----------
    url : str
        Page to download.
    parser : str
        BeautifulSoup parser backend (default: the stdlib "html.parser").

    Returns
    -------
    BeautifulSoup
        Parsed document tree.

    Raises
    ------
    requests.HTTPError
        On a non-2xx response (via ``raise_for_status``).
    """
    # timeout prevents the script from hanging forever on a stalled connection
    r = requests.get(url, headers=HEADERS, timeout=30)
    r.raise_for_status()
    return BeautifulSoup(r.content, parser)


def main():
    """Download the supported-phones list, clean it, and write ahamo.csv."""
    url = "https://ahamo.com/support/supported-phones/index.html"
    soup = fetch_soup(url)

    # Device names sit in <span> elements inside the page's list markup.
    data = [i.get_text(strip=True) for i in soup.select("div > ul > li > span")]
    se = pd.Series(data)

    # Strip footnote markers ("※" optionally followed by 1-3).
    # regex=True must be explicit: pandas >= 2.0 defaults str.replace to a
    # literal (non-regex) match, which would leave these markers in place.
    cleaned = se.str.replace(r"※[123]?", "", regex=True).str.strip()

    # utf_8_sig writes a BOM so Excel opens the Japanese text correctly.
    cleaned.to_csv("ahamo.csv", index=False, header=False, encoding="utf_8_sig")


if __name__ == "__main__":
    main()