import requests
from bs4 import BeautifulSoup
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
    'Cookie': '',
    'Referer': 'https://store.hikarifield.co.jp/full_games',
}
base_url = "https://store.hikarifield.co.jp/shop/"
product_ids = [
    "tayutama2", "sakura_no_mori", "monobeno", "monobeno_happy_end",
    "hello_lady", "maitetsu_pure_station", "sakura_no_mori2", "hello_lady_nd",
    "tsukikage", "aokana", "alias_carnival", "natsunoiro",
    "senren_banka", "tryment_alpha", "happiness2", "maitetsu_lastrun",
    "aokana_extra1", "riddle_joker", "relief", "kinkoi",
    "parquet", "honoguraki", "hello_lady_se", "madoki",
    "sekachu", "stella", "hananono", "magical_charming",
    "tsukiniyorisou", "kinkoigt", "yumahorome", "cross_concerto",
    "soratoto", "aonatsu", "aokana_extra2", "future_radio",
    "shuffle_ep2", "witch_garden", "alias_carnival_flowering_sky", "sothewitch",
    "koikake", "clover_days", "arcana", "otomeriron",
    "seikano", "making_lovers",
]
data_list = []
for product_id in product_ids:
    url = f"{base_url}{product_id}"
    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, "html.parser")
        # Either element may be absent on some product pages (find() then
        # returns None), so guard before dereferencing instead of crashing.
        name_tag = soup.find("h1", class_="col-12 col-md p-0")
        price_tag = soup.find("span", class_="discount-price")
        if name_tag is None or price_tag is None:
            print(f"Missing name/price markup for product ID {product_id}, skipping")
            continue
        data_list.append((product_id, name_tag.text.strip(), price_tag.text.strip()))
    except requests.RequestException as e:
        print(f"Error fetching data for product ID {product_id}: {e}")
with open("hf_out.txt", "w", encoding="utf-8") as file:
    for product_id, name, price in data_list:
        file.write(f"{product_id}\n{name}\n{price}\n\n")
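# Resulting hf_out.txt layout: one blank-line-separated record per title,
# in the shape below (the name/price values are placeholders, not real data):
#
#   tayutama2
#   <product name>
#   <price>
#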
print("Ciallo~(∠・ω<)⌒☆ Data saved to hf_out.txt")