#!pip install beautifulsoup4 requests
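
# Scrape jmty.jp classifieds for one location/category: find the last
# index page, walk the index pages back down to page 1, open every
# listed item, and tally valid / closed / stale (>41 days) / ad entries.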

from bs4 import BeautifulSoup
import re
import time
import datetime
import requests

dt_now = datetime.datetime.now()
#this_year = dt_now.year

counter = 2       # page counter; re-seeded with the last page number below
last_page = 1
valid = 0
once = False      # whether the last-page lookup has already run
invalid = 0
closed = 0

adver = 0         # ads mixed into the listing pages
response_err = 0  # non-200 responses on item pages

location = 'kyoto'
##location = 'all'
##category = "/sale-tic/p-" #tickets
##category = "/sale-boo/p-" #books
##category = "/sale-pcp/p-" #PCs
category = "/sale-inc/p-"   #musical instruments
##category = "/coop/p-"
##category = "/sale/p-"     #sale (all)
##category = "/coop-help/p-"#mutual help
##category = "/sale-bic/p-" #bicycles
##category = "/est-sha/p-"  #share house
url_base = "https://jmty.jp/" + location + category
########################################################################
comp_match = category[:-2]  # drop the trailing "p-", leaving e.g. "/sale-inc/"
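# item URLs are assumed to look like
# https://jmty.jp/<location>/sale-inc/article-XXXXXXXX, so the
# "/sale-inc/" fragment kept in comp_match is enough to recognise them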

while counter > 1:

    counter = counter - 1
    url = url_base + str(counter)
    res = requests.get(url, timeout=10)
    effective_url = res.url
    res_text = res.text

    bs = BeautifulSoup(res_text, "html.parser")

    # On the first pass (page 1), read the number of the last index
    # page, then restart the loop from it and count down.
    if counter == 1 and not once:
        once = True
        try:
            last_exist = bs.find("li", {"class": "last"})
            if last_exist:
                last_page = int(last_exist.get_text())
                counter = last_page + 1
        except ValueError:
            counter = 2  # fall back to scraping page 1 only
        continue  # restart from the last page and decrement
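    # assumption: the index page exposes pagination as an
    # <li class="last">N</li> element holding the final page number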

    #print(res_text)

    item_box = bs.find_all("li", {"class": "p-articles-list-item"})
    item_box_count = len(item_box)
    print('----------------------------------------------------')
    print(effective_url,end=" | ")
    print(item_box_count)
    print('----------------------------------------------------')

########################################################################
    for i in range(item_box_count):
        ended = item_box[i].find("div", {"class": "p-item-close-text"})  # closed marker
        if ended:  # listing closed ("受付終了")
            invalid = invalid + 1
            closed = closed + 1
            print("closed", invalid, end=" | ")
            # the closing notice on the item itself (kept for debugging)
            check_ended = item_box[i].find("div", {"class": "u-font-bold u-margin-m-t"})
            if check_ended:
                check_ended_text = check_ended.get_text().strip()
                #print(check_ended_text)

        title = item_box[i].find("h2", {"class": "p-item-title"}).get_text().strip()

        price = item_box[i].find("div", {"class": "p-item-most-important"})
        if price:
            price = price.get_text().strip()

        fav = item_box[i].find("span", {"class": "js_fav_user_count u-size-s"})
        if fav:
            fav = fav.get_text().strip()
        else:
            fav = "0"
########################################################################
        for ii in item_box[i].find("h2", {"class": "p-item-title"}).select("a"):
            subject_url = ii.get("href")
            if ended:
                print(subject_url)
                continue

            # comp_match is a literal path fragment, so a substring test
            # is enough to tell real listings from injected ads
            if comp_match not in subject_url:  # ad ("広告")
                adver = adver + 1
                print("ad", adver, end=" | ")
                print(subject_url)
                print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
                continue

            time.sleep(1)  # throttle item-page requests

            try:
                res = requests.get(subject_url, timeout=10)
            except requests.RequestException:
                continue

            res_code = res.status_code
            effective_url = res.url
            if res_code != 200:
                response_err = response_err + 1
                continue
########################################################################

            html_text = res.text

            bs_detail = BeautifulSoup(html_text, "html.parser")

            update_date = bs_detail.find("div", {"class": "p-article-history"})
            if update_date is None:
                continue
            update_daytime = update_date.find_all("div")[0].get_text()

            # pull the digits out of the update timestamp,
            # e.g. "2021年12月20日18時30分" -> ['2021', '12', '20', '18', '30']
            # (the exact label text is assumed; only the digits matter here)
            ymdhm = re.findall(r'\d+', update_daytime)
            dt = datetime.datetime(*map(int, ymdhm[:5]))
            td = dt_now - dt
            if td.days > 41:  # updated more than 41 days ago
                invalid = invalid + 1
                print("older than 41 days", invalid, end=" | ")
                print(effective_url)
                print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
                continue
            elif "u-text-center u-size-s u-font-bold" in html_text:
                # the closed banner's CSS classes mark a finished listing ("受付終了")
                invalid = invalid + 1
                closed = closed + 1
                print("closed", invalid, end=" | ")
                print(effective_url)
                print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
                continue
            else:  # valid listing (#u-size-xs u-color-gray u-margin-xs-t)
                print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
#########################################################################
                valid = valid + 1
                print("p-", counter, "valid", valid, effective_url)
                print(title, price)
                print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
#########################################################################

print("all 有効 :" + str(valid))
print("all invalid :" + str(invalid))
print("all 受付終了 :" + str(closed))
print("all 広告 :" + str(adver))
print("response_err :" + str(response_err))