;;#!pip install beautifulsoup4 pycurl certifi
;;#!pip install beautifulsoup4 requests
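;; Scrape jmty.jp listing index pages for one location and category, follow each
;; item's detail page, and count listings that are still valid, already closed,
;; advertisements, older than 41 days, or unreachable (non-200 responses).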
(import [bs4 [BeautifulSoup]])
(import re requests time datetime)
(import pycurl)
(import certifi)
(import [io [BytesIO]])
;;from bs4 import BeautifulSoup
;;import re
;;import time,datetime
;;import pycurl
;;import certifi
;;from io import BytesIO
(setv dt_now (.now datetime.datetime))
;;dt_now = datetime.datetime.now()
;;#this_year = dt_now.year
(setv counter 2)
;;counter = 2
(setv last_page 1)
;;last_page = 1
(setv valid 0)
;;valid = 0
(setv onece False)
;;onece = False
(setv invalid 0 closed 0 adver 0 response_err 0)
;;invalid = 0
;;closed = 0
;;adver = 0
;;response_err = 0
(setv location "kyoto" category "/sale-inc/p-")
;;location = 'kyoto'
;;##location = 'all'
;;##category = "/sale-tic/p-"
;;##category = "/sale-tic/p-" #ticket
;;##category = "/sale-boo/p-" #book
;;##category = "/sale-pcp/p-" #pc
;;category = "/sale-inc/p-" #gakki
;;##category = "/coop/p-"
;;##category = "/sale/p-" #sale
;;##category = "/coop-help/p-"#tasuke
;;##category = "/sale-bic/p-" #jitensya
;;##category = "/est-sha/p-" #share house
(setv url_base (+ "https://jmty.jp/" location category))
;;url_base = "https://jmty.jp/" + location + category
;;########################################################################
;;requests;;(setv uAgent {"User-Agent" "Mozilla/5.0 (Linux; Android 9) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36"}) ;;;;★★★★
;;requests;;(setv r-dict {"referer" "https://google.com/"})
;;requests;;(setv r-uAgent {#** r-dict #** uAgent})
(setv comp_match (cut category 0 -2))
;;comp_match = category[0:-2]
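;; Main loop over index pages. On the first pass only, the loop reads the number
;; of the last index page from the pagination and restarts from it, so pages are
;; then scanned from the last page down to page 1.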
(while (> counter 1)
;;while(counter > 1):
(setv counter (- counter 1))
;;counter = counter - 1
(setv url (+ url_base (str counter)))
;;url = url_base + str(counter)
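;; Fetch the index page with pycurl into an in-memory buffer; the ";;requests;;"
;; lines below are the commented-out requests-based alternative.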
(setv buffer (BytesIO))
;;buffer = BytesIO()
(setv c (.Curl pycurl))
;;c = pycurl.Curl()
(c.setopt c.CAINFO (.where certifi))
;;c.setopt(c.CAINFO, certifi.where())
(c.setopt c.URL url)
;;c.setopt(c.URL, url)
(c.setopt c.WRITEDATA buffer)
;;c.setopt(c.WRITEDATA, buffer)
(.perform c)
;;c.perform()
(setv effective_url (.getinfo c c.EFFECTIVE_URL))
;;effective_url = c.getinfo(c.EFFECTIVE_URL)
(.close c)
;;c.close()
(setv res_byte (.getvalue buffer))
;;res_byte = buffer.getvalue()
(del buffer)
;;del buffer
(setv res_text (.decode res_byte "utf-8"))
;;res_text = res_byte.decode('utf-8')
(del res_byte)
;;del res_byte
;;requests;; (setv session (.Session requests))
;;requests;; (setv response (session.get url :headers r-uAgent)) ;;;;★★★★
;;requests;; (.close response)
;;requests;; (del session)
;;requests;; (setv effective_url url)
;;requests;; (setv res_text response.text)
(setv bs (BeautifulSoup res_text "html.parser"))
;;bs = BeautifulSoup(res_text, "html.parser")
;;# first pass only: find the last index page number and restart from it
(if (and (= counter 1) (= onece False))
;;if counter == 1 and onece == False :
(do
(setv onece True)
;;onece = True
;;(try
;;try:
(setv last_exist (.find bs "div" :class_ "last"))
;;last_exist = bs.find("div",{"class":"last"})
(if last_exist
;;if last_exist:
(do
(setv last_page (.get_text (.find bs "li" :class_ "last")))
;;last_page = int(bs.find("li", {"class":"last"}).get_text())
(setv counter (+ (int last_page) 1))
;;counter = last_page + 1
)
;;(except
;;except:
(setv counter 2))
;;counter = 2
(continue)); jump back to the loop start so the last index page is scraped first, then the page counter is decremented
;;continue # jump back to the loop start so the last index page is scraped first, then the page counter is decremented
)
;(print res_text)
;;#print(res_text)
(setv item_box (.findAll bs "li" :class_ "p-articles-list-item"))
;;item_box = bs.findAll("li", {"class":"p-articles-list-item"})
(setv item_box_count (len item_box))
;;item_box_count = len(item_box)
(print "----------------------------------------------------")
;;print('----------------------------------------------------')
(print effective_url :end " | ")
;;print(effective_url,end=" | ")
(print item_box_count)
;;print(item_box_count)
(print "----------------------------------------------------")
;;print('----------------------------------------------------')
;;########################################################################
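;; Loop over every listing card (li.p-articles-list-item) found on this index page.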
(for [i (range item_box_count)]
;;for i in range(item_box_count):
(setv ended (.find (get item_box i) "div" :class_ "p-item-close-text")) ;;closed
;;ended = item_box[i].find("div",{"class":"p-item-close-text"}) #closed
(if ended
;;if ended:# listing closed
(do
(setv invalid (+ invalid 1))
;;invalid = invalid + 1
(setv closed (+ closed 1))
;;closed = closed + 1
(print "already closed." invalid :end " | ")
;;print("受付終了",invalid,end=" | ")
;;#check ended
(setv check_ended (.find bs "div" :class_ "u-font-bold u-margin-m-t"))
;;check_ended = bs.find("div",{"class":"u-font-bold u-margin-m-t"})
(if check_ended
;;if check_ended:
(do
(setv check_ended_text (.strip (.get_text check_ended)))
;;check_ended_text = check_ended.get_text().strip()
;;#print(check_ended_text)
)
)
))
(setv title (.get_text (.find (get item_box i) "h2" :class_ "p-item-title")))
;;title = item_box[i].find("h2", {"class":"p-item-title"}).get_text()
(setv title (.strip title))
;;title = title.strip()
(setv price (.find (get item_box i) "div" :class_ "p-item-most-important"))
;;price = item_box[i].find("div", {"class":"p-item-most-important"})
(if price
;;if price:
(do
(setv price (.get_text price))
;;price = price.get_text()
(setv price (.strip (str price)))
;;price = str(price).strip()
;;price = price.strip()
))
(setv fav (.find (get item_box i) "span" :class_ "js_fav_user_count u-size-s"))
;;fav = item_box[i].find("span", {"class":"js_fav_user_count u-size-s"})
(if fav
;;if fav :
(do
(setv fav (.get_text fav))
;;fav = fav.get_text()
(setv fav (.strip fav))
;;fav = fav.strip()
)
(setv fav 0))
;;else:
;;fav = "0"
;########################################################################
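;; Walk the links inside the item title: skip closed items and ads, then fetch the detail page.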
(for [ii (.select (.find (get item_box i) "h2" :class_ "p-item-title") "a")]
;;for ii in item_box[i].find("h2", {"class":"p-item-title"}).select("a"):
;;(print ii)
;;#print(ii)
(setv subject_url (.get ii "href"))
;;subject_url = ii.get("href")
(if ended
;;if ended:
(do
(print subject_url)
;;print(subject_url)
(continue)
;;continue
))
(setv category_match (re.findall comp_match subject_url))
;;category_match = re.findall(comp_match,subject_url)
(if (= (len category_match) 0)
;;if len(category_match) == 0:# advertisement
(do
(.purge re)
;;re.purge()
(setv adver (+ adver 1))
;;adver = adver + 1
(print "広告" adver :end " | ")
;;print("広告",adver,end=" | ")
(print subject_url)
(print "::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
;;print(subject_url)
(continue)
;;continue
))
(.purge re)
;;re.purge()
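;; Fetch the listing's detail page with pycurl; non-200 responses are counted
;; as response_err and skipped.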
(setv buffer (BytesIO))
;;buffer = BytesIO()
(setv c (.Curl pycurl))
;;c = pycurl.Curl()
(.setopt c c.CAINFO (.where certifi))
;;c.setopt(c.CAINFO, certifi.where())
(.setopt c c.URL subject_url)
;;c.setopt(c.URL, subject_url)
(.setopt c c.WRITEDATA buffer)
;;c.setopt(c.WRITEDATA, buffer)
;(.sleep time 1)
;;#time.sleep(1)
;;(try
;;try:
(.perform c)
;;c.perform()
;;requests;; (do
;;requests;; (setv session (.Session requests))
;;requests;; (setv response (session.get subject_url :headers r-uAgent)) ;;;;★★★★
;;requests;; (.close response)
;;requests;; (del session)
;;requests;; (setv effective_url subject_url)
;;requests;; (setv html_text response.text)
;;requests;; (setv res_code response.status_code))
;;(except
;;except:
;; (do
;; (c.close)
;; (continue)
;;continue
;;)))
(setv res_code (.getinfo c c.RESPONSE_CODE))
;;res_code = c.getinfo(c.RESPONSE_CODE)
(setv effective_url (.getinfo c c.EFFECTIVE_URL))
;;effective_url = c.getinfo(c.EFFECTIVE_URL)
(.close c)
;;c.close()
;;#print(res_code)
(unless (= res_code 200)
;;if res_code != 200:
(do
(setv response_err (+ response_err 1))
;;response_err = response_err + 1
(continue)
;;continue
))
;########################################################################
(setv res_byte (.getvalue buffer))
;;res_byte = buffer.getvalue()
(del buffer)
;;del buffer
(setv html_text (.decode res_byte "utf-8"))
;;html_text = res_byte.decode('utf-8')
(del res_byte)
;;del res_byte
;;#print(html_text)
(setv bs_detail (BeautifulSoup html_text "html.parser"))
;;bs_detail = BeautifulSoup(html_text, "html.parser")
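;; Pull the update timestamp (year, month, day, hour, minute) out of the
;; article-history block and compute its age relative to the script start time (dt_now).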
(setv update_date (.find bs_detail "div" :class_ "p-article-history"))
;;update_date = bs_detail.find("div", {"class":"p-article-history"})
(setv update_daytime (.get_text (get (.findAll update_date "div") 0)))
;;update_daytime = update_date.findAll("div")[0].get_text()
(setv ymdhm (.findall re r"\d+" update_daytime))
;;ymdhm = re.findall(r'\d+',update_daytime)
;# year month day hour minutes
(setv dt (.datetime datetime (int (get ymdhm 0)) (int (get ymdhm 1)) (int (get ymdhm 2)) (int (get ymdhm 3)) (int (get ymdhm 4))))
;;dt = datetime.datetime(int(ymdhm[0]),int(ymdhm[1]),int(ymdhm[2]),int(ymdhm[3]),int(ymdhm[4]))
(.purge re)
;;re.purge()
(setv td (- dt_now dt))
;;td = dt_now - dt
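;; Skip listings updated more than 41 days ago and listings already marked as
;; closed on the detail page; anything that falls through is counted as valid.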
(cond
[(> td.days 41);:# 41 日以上前
;;if td.days > 41:# 41 日以上前
(do
(del bs_detail)
;;del bs_detail
(setv html_text "")
;;html_text = ''
(setv invalid (+ invalid 1))
;;invalid = invalid + 1
(print "41日以上前" invalid :end " | ")
;;print("41日以上前",invalid,end=" | ")
(print effective_url)
(print "::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
;;print(effective_url)
(continue)
;;continue
)]
[(.search re "u-text-center u-size-s u-font-bold" html_text)
;;elif re.search("u-text-center u-size-s u-font-bold",html_text):
;;# listing closed
(do
(.purge re)
;;re.purge()
(setv html_text "")
;;html_text = ''
(del bs_detail)
;;del bs_detail
(setv invalid (+ invalid 1))
;;invalid = invalid + 1
(setv closed (+ closed 1))
;;closed = closed + 1
(print "受付終了" invalid :end " | ")
;;print("受付終了",invalid,end=" | ")
(print effective_url)
(print "::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
;;print(effective_url)
(continue)
;;continue
)
])
;;else:# valid #u-size-xs u-color-gray u-margin-xs-t
(print "::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
;;print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
;#########################################################################
(setv valid (+ valid 1))
;;valid = valid + 1
(print "p-" counter "有効" valid effective_url)
;;print("p-",counter,"有効",valid,effective_url)
(print title price)
;;print(title,price)
(print "::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
;;print("::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
)
)
;#########################################################################
(del bs)
;;del bs
)
(print (+ "all 有効 :" (str valid)))
;;print("all 有効 :" + str(valid))
(print (+ "all invalid :" (str invalid)))
;;print("all invalid :" + str(invalid))
(print (+ "all 受付終了 :" (str closed)))
;;print("all 受付終了 :" + str(closed))
(print (+ "all 広告 :" (str adver)))
;;print("all 広告 :" + str(adver))
(print (+ "response_err :" (str response_err)))
;;print("response_err :" + str(response_err))