Flexibly fetching Vipshop (唯品会) product data on demand with Python


from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from pyquery import PyQuery as pq
import time
import random

# Launch Chrome through the local chromedriver executable (Windows path)
browser = webdriver.Chrome(r"C:\Users\dell\AppData\Local\Google\Chrome\Application\chromedriver.exe")
# Maximize the browser window
browser.maximize_window()
# Search keyword; change KEYWORD to scrape a different product
KEYWORD = "iPhone"  # assumed keyword, matching the vip_iPhone.csv output file below


class VIP(object):
    def search(self):
        """
                
        :param page:      
        :return:
        """
        url = "https://www.vip.com/"
        browser.get(url)
        wait = WebDriverWait(browser, 5)
        # Wait for the search input box to be present
        input = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, ".c-search-input")))
        # Search button
        # (alternatively, the query could be submitted by sending an ENTER key to the input)
        submit = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, ".J-search-button")))
        time.sleep(random.randint(1, 4))
        # Clear the input box
        input.clear()
        # Type the keyword
        input.send_keys(KEYWORD)
        time.sleep(random.randint(1, 4))
        # Click the search button
        submit.click()
        time.sleep(random.randint(1, 4))
        # Scroll through the page so lazily loaded items render
        self.scroll()
        # browser.execute_script('window.scrollTo(0, document.body.scrollHeight)')
        # The product list is not inside a frame, so no frame switching is needed;
        # the items load lazily, so pause briefly after scrolling before parsing
        time.sleep(random.randint(2, 4))

    def scroll(self):
        """
                
        :return:
        """
        browser.execute_script(""" 
            (function () { 
                var y = document.body.scrollTop; 
                var step = 100; 
                window.scroll(0, y); 
                function f() { 
                    if (y < document.body.scrollHeight) { 
                        y += step; 
                        window.scroll(0, y); 
                        setTimeout(f, 50); 
                    }
                    else { 
                        window.scroll(0, y); 
                        document.title += "scroll-done"; 
                    } 
                } 
                setTimeout(f, 1000); 
            })(); 
            """)

    def getData(self):
        """
              
        :return:
        """
        html = browser.page_source
        doc = pq(html)
        items = doc(".goods-list-item").items()
        for index, item in enumerate(items):
            product = {
                "URL:": "http:{}".format(str(item(".goods-image a").attr("href"))),
                "DISCOUNT_PRICE:": item(".inner-exclusive").text(),
                "VIP_PRICE:": item(".goods-vipshop-wrap").text(),
                "DISCOUNT:": item(".goods-discount-wrap").text().replace("
", " "), "TITLE:": item(".goods-title-info").text() } self.write(product) print(index, product) time.sleep(random.randint(1, 4)) def write(self, content): """ :param content: :return: """ with open(r"vip_iPhone.csv", 'a+', encoding="utf-8") as file: file.write(str(content) + "
") def nextPage(self): """ < :return: """ wait = WebDriverWait(browser, 5) # sumbit = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, ".cat-paging-next"))) time.sleep(4) sumbit.click() # URL URL current_url = browser.current_url print(current_url) def execute(self): # self.search() # self.getData() # # 19 for i in range(1, 19): # self.nextPage() # self.getData() if __name__ == "__main__": vip = VIP() vip.execute()