
Scraping the CSDN blog ranking list with selenium + python + phantomjs

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017/4/11 19:21
# @Author  : wanghaitao
# @File    : spider.py
# @Software: PyCharm
import codecs

from selenium import webdriver


def crawl(driver, url):
    """Fetch the CSDN ranking page and record the weekly article ranking."""
    driver.get(url)
    print("CSDN ranking:\tWeekly article ranking\tTotal views")
    print('Crawled information:\n')

    # Append the results to a UTF-8 encoded text file.
    infofile = codecs.open("Result_csdn.txt", 'a', 'utf-8')

    # Every <li> under this container is one ranked entry:
    # <em> holds the rank, <a> the post title, <b> the view count.
    content = driver.find_elements_by_xpath('/html/body/div[5]/div[1]/ul/li')
    for item in content:
        result = item.find_element_by_tag_name('em').text.split('.')[0] \
                 + ':\t' \
                 + item.find_element_by_tag_name('a').text \
                 + '\t\t\t' \
                 + item.find_element_by_tag_name('b').text + '\n'
        print(result)
        infofile.write(result)

    infofile.close()


if __name__ == '__main__':
    print('this is the main function:')
    URL = 'http://blog.csdn.net/ranking.html'
    Driver = webdriver.PhantomJS()   # requires the phantomjs binary on PATH
    # Driver = webdriver.Chrome()    # alternative: drive Chrome via chromedriver
    crawl(Driver, URL)
    Driver.quit()   # quit() also shuts down the PhantomJS process; close() alone would leave it running
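
PhantomJS is no longer maintained and recent Selenium releases have deprecated or removed its driver, so the commented-out webdriver.Chrome() line above is the natural fallback. Below is a minimal sketch of the same crawl using headless Chrome plus an explicit wait, assuming a Selenium 3.x install with the chromedriver binary on PATH; the XPath and output file name are carried over from the original script, while the crawl_headless helper name is invented for this sketch only.

import codecs

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


def crawl_headless(url):
    # Run Chrome without a visible window, similar to PhantomJS.
    opts = Options()
    opts.add_argument('--headless')
    driver = webdriver.Chrome(chrome_options=opts)  # keyword renamed to options= in Selenium 4
    try:
        driver.get(url)
        # Wait up to 10 s for the ranking list to appear instead of
        # assuming the DOM is ready immediately after get().
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located(
                (By.XPATH, '/html/body/div[5]/div[1]/ul/li')))
        with codecs.open("Result_csdn.txt", 'a', 'utf-8') as infofile:
            for item in driver.find_elements(
                    By.XPATH, '/html/body/div[5]/div[1]/ul/li'):
                rank = item.find_element(By.TAG_NAME, 'em').text.split('.')[0]
                title = item.find_element(By.TAG_NAME, 'a').text
                views = item.find_element(By.TAG_NAME, 'b').text
                line = rank + ':\t' + title + '\t\t\t' + views + '\n'
                print(line)
                infofile.write(line)
    finally:
        driver.quit()


if __name__ == '__main__':
    crawl_headless('http://blog.csdn.net/ranking.html')

The explicit wait is the main behavioural difference from the script above, which reads the DOM immediately after driver.get(); the parsing of each <li> entry mirrors the original logic.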