# Fetch each follower's nicovideo.jp profile page and extract the initial-data
# JSON embedded in it, writing one object per line to followers_detail.txt.
import json
import time

import requests
from bs4 import BeautifulSoup

path_f = "followers.txt"  # input: one JSON object per line
f_list = []
path_fd = "followers_detail.txt"  # output: one user-detail JSON object per line
fd_list = []
delay_seconds = 0.1  # politeness delay between page fetches

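# Request headers mimicking the nicovideo.jp web frontend.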
headers = {
    "Accept": "*/*",
    "Accept-Language": "ja",
    "Origin": "https://www.nicovideo.jp",
    "Referer": "https://www.nicovideo.jp/",
    "X-Frontend-Id": "6",
    "X-Frontend-Version": "0",
    "X-Niconico-Language": "ja-jp",
}

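# Read the follower list: each line is parsed as one JSON document.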
with open(path_f, 'r', encoding='utf-8') as f:
    f_list = [json.loads(line) for line in f]

for i, follower in enumerate(f_list):
    # NOTE: assumes the follower of interest is the first item in each
    # stored response; adjust the index if a line holds multiple items.
    uid = follower["data"]["items"][0]["id"]
    url = f"https://www.nicovideo.jp/user/{uid}"
    print(f"[{i}] {url}")

    try:
        res = requests.get(url, headers=headers, timeout=10)
        res.raise_for_status()

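        # The user page embeds its initial state as JSON in the
        # "data-initial-data" attribute of the #js-initial-userpage-data tag.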
        soup = BeautifulSoup(res.text, "html.parser")
        tag = soup.find(id="js-initial-userpage-data")

        if not tag:
            print("not found: js-initial-userpage-data")
            fd_list.append({})  # placeholder keeps output aligned with input
            continue

        data_init = tag.get("data-initial-data")
        if not data_init:
            print("not found: data-initial-data")
            fd_list.append({})
            continue

        fd_list.append(json.loads(data_init))

    except Exception as e:
        print(f"[{i}] error: {e}")
        fd_list.append({})  # keep one output line per follower on error too
    finally:
        # Throttle requests; finally runs on every path, including continue.
        time.sleep(delay_seconds)

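# Write the collected details as JSON Lines, preserving non-ASCII text.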
with open(path_fd, 'w', encoding='utf-8') as f:
    for fd in fd_list:
        f.write(json.dumps(fd, ensure_ascii=False))
        f.write("\n")