import json
import time

import requests
from bs4 import BeautifulSoup

path_f = "followers.txt"          # input: one JSON API response per line
path_fd = "followers_detail.txt"  # output: one JSON object per line
fd_list = []
delay_seconds = 0.1               # pause between requests to avoid hammering the server

# Headers mimicking the Niconico web frontend; the X-Frontend-Id and
# X-Frontend-Version headers are expected by Niconico's endpoints.
headers = {
    "Accept": "*/*",
    "Accept-Language": "ja",
    "Origin": "https://www.nicovideo.jp",
    "Referer": "https://www.nicovideo.jp/",
    "X-Frontend-Id": "6",
    "X-Frontend-Version": "0",
    "X-Niconico-Language": "ja-jp",
}

# Load the follower list: each line of followers.txt is a JSON document.
with open(path_f, "r", encoding="utf-8") as f:
    f_list = [json.loads(line) for line in f]

for i, follower in enumerate(f_list):
    uid = follower["data"]["items"][0]["id"]
    url = f"https://www.nicovideo.jp/user/{uid}"
    print(f"[{i}] {url}")
    try:
        res = requests.get(url, headers=headers, timeout=10)
        res.raise_for_status()

        # The user page embeds its initial state as a JSON string in the
        # data-initial-data attribute of the #js-initial-userpage-data element.
        soup = BeautifulSoup(res.text, "html.parser")
        tag = soup.find(id="js-initial-userpage-data")
        if not tag:
            print("not found: js-initial-userpage-data")
            fd_list.append({})
            continue
        data_init = tag.get("data-initial-data")
        if not data_init:
            print("not found: data-initial-data")
            fd_list.append({})
            continue
        fd_list.append(json.loads(data_init))
        time.sleep(delay_seconds)
    except Exception as e:
        print(e)
        fd_list.append({})  # keep output lines aligned with input lines even on error

# Write one JSON object per line, so the output stays line-aligned with the input.
with open(path_fd, "w", encoding="utf-8") as f:
    for fd in fd_list:
        f.write(json.dumps(fd, ensure_ascii=False))
        f.write("\n")
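For reference, the only field the script actually reads from each followers.txt line is data.items[0].id. Below is a minimal sketch of that assumed line shape, plus how to load the output back; the concrete id value is hypothetical, and the real API responses will contain additional fields not used above.

# A minimal sketch, assuming each followers.txt line looks roughly like this.
# The id value 12345678 is hypothetical; only data.items[0].id is required.
import json

sample_line = '{"data": {"items": [{"id": 12345678}]}}'
uid = json.loads(sample_line)["data"]["items"][0]["id"]
print(uid)  # 12345678

# Reading followers_detail.txt back as a list of dicts (one per follower);
# entries that failed to fetch or parse come back as empty dicts.
with open("followers_detail.txt", "r", encoding="utf-8") as f:
    details = [json.loads(line) for line in f]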