Python3實現的爬蟲爬取資料並存入mysql資料庫操作示例
阿新 • • 發佈:2020-01-09
本文例項講述了Python3實現的爬蟲爬取資料並存入mysql資料庫操作。分享給大家供大家參考,具體如下:
爬一個電腦客戶端的訂單。羅總推薦,抓包工具用的是HttpAnalyzerStdV7,與chrome自帶的F12類似。客戶端有接單大廳,羅列所有訂單的簡要資訊。當單子被接了,就不存在了。我要做的是新出訂單就爬取記錄到我的資料庫zyc裡。
設定每10s爬一次。
抓包工具頁面如圖:
首先是爬蟲,先找到資料儲存的頁面,再用正則爬出。
# -*- coding:utf-8 -*-
import re
import requests
import pymysql  # Python 3 MySQL driver (Python 2 used MySQLdb)
import datetime
import time


def GetResults():
    """Scrape the order hall, then each order's detail page.

    Returns a list with one entry per order; each entry is the flat list
    of field values captured by the patterns in ``reg``.  On failure it
    returns an empty list (the original implicitly returned ``None``,
    which crashed the caller's ``len(results)``).
    """
    requests.adapters.DEFAULT_RETRIES = 5  # retry transient connection errors

    # One pattern per field of the order-detail payload.
    # NOTE(review): the published listing had the closing quote stripped
    # from almost every literal below (copy/paste mangling); reconstructed
    # here from the field names so the list is valid Python.
    reg = [
        r'"id":(.*?),',
        r'"order_no":"(.*?)",',
        r'"order_title":"(.*?)",',
        r'"publish_desc":"(.*?)",',
        # Index 4: three capture groups (game / area / district) -> findall
        # yields tuples; flattened specially below.
        r'"game_area":"(.*?)\\/(.*?)\\/(.*?)",',
        r'"order_current":"(.*?)",',
        r'"order_content":"(.*?)",',
        r'"order_hours":(.*?),',
        r'"order_price":"(.*?)",',
        r'"add_price":"(.*?)",',
        r'"safe_money":"(.*?)",',
        r'"speed_money":"(.*?)",',
        r'"order_status_desc":"(.*?)",',
        r'"order_lock_desc":"(.*?)",',
        r'"cancel_type_desc":"(.*?)",',
        r'"kf_status_desc":"(.*?)",',
        r'"is_show_pwd":(.*?),',
        r'"game_pwd":"(.*?)",',
        r'"game_account":"(.*?)",',
        r'"game_actor":"(.*?)",',
        r'"left_hours":"(.*?)",',
        r'"created_at":"(.*?)",',
        r'"account_id":"(.*?)",',
        r'"mobile":"(.*?)",',
        r'"contact":"(.*?)",',
        r'"qq":"(.*?)"},',
    ]
    results = []
    try:
        for page in range(1, 2):  # hall page numbers
            # requests expects lowercase scheme keys in the proxies mapping;
            # the original's 'HTTP' key was silently ignored.
            proxy = {'http': '61.135.155.82:443'}
            # Hall URL.  The original had '&times;tamp' mis-decoded into
            # '×tamp' and a raw-request ' HTTP/1.1' pasted onto the end;
            # both corrected here.
            hall_url = (
                'https://www.dianjingbaozi.com/api/dailian/soldier/hall'
                '?access_token=3ef3abbea1f6cf16b2420eb962cf1c9a'
                '&dan_end=&dan_start=&game_id=2&kw=&order=price_desc'
                '&page=%d' % page +
                '&pagesize=30&price_end=0&price_start=0'
                '&server_code=000200000000'
                '&sign=ca19072ea0acb55a2ed2486d6ff6c5256c7a0773'
                '&timestamp=1511235791&type=public&type_id='
            )
            # Decode the body: re.findall needs str, not a Response object
            # (the original left the decode commented out and searched the
            # Response itself, which raises TypeError).
            html = requests.get(hall_url, proxies=proxy).content.decode('utf-8')
            # Order numbers drive the per-order detail-page URLs.
            order_nos = re.findall(r'"order_no":"(.*?)","game_area"', html)
            for order_no in order_nos:
                detail_url = (
                    'http://www.lpergame.com/api/dailian/order/detail'
                    '?access_token=eb547a14bad97e1ee5d835b32cb83ff1'
                    '&order_no=' + order_no +
                    '&sign=c9b503c0e4e8786c2945dc0dca0fabfa1ca4a870'
                    '&timestamp=1511146154'
                )
                html_order = requests.get(
                    detail_url, proxies=proxy).content.decode('utf-8')
                outcome_reg = []
                for i, pattern in enumerate(reg):
                    outcome = re.findall(pattern, html_order)
                    if i == 4:
                        # Tuple matches: flatten each 3-group hit in order.
                        for groups in outcome:
                            outcome_reg.extend(groups)
                    else:
                        outcome_reg.extend(outcome)
                results.append(outcome_reg)
        return results
    except Exception:  # narrowed from bare 'except:'; still best-effort
        time.sleep(5)  # back off: the server throttles frequent requests
        print("失敗")
        return []
根據爬蟲結果建表,這裡變數名要準確。並且要設定唯一索引,使每次爬的只有新訂單入庫。
def mysql_create(): mysql_host = '' mysql_db = 'zyc' mysql_user = 'zyc' mysql_password = '' mysql_port = 3306 db = pymysql.connect(host=mysql_host,port=mysql_port,user=mysql_user,password=mysql_password,db=mysql_db,charset='utf8') # 連線資料庫編碼注意是utf8,不然中文結果輸出會亂碼 sql_create = "CREATE TABLE DUMPLINGS (id CHAR(10),order_no CHAR(50),order_title VARCHAR(265),publish_desc VARCHAR(265),game_name VARCHAR(265),"\ "game_area VARCHAR(265),game_area_distinct VARCHAR(265),order_current VARCHAR(3908),order_content VARCHAR(3908),order_hours CHAR(10)," \ "order_price FLOAT(10),add_price FLOAT(10),safe_money FLOAT(10),speed_money FLOAT(10),order_status_desc VARCHAR(265),"\ "order_lock_desc VARCHAR(265),cancel_type_desc VARCHAR(265),kf_status_desc VARCHAR(265),is_show_pwd TINYINT,game_pwd CHAR(50),"\ "game_account VARCHAR(265),game_actor VARCHAR(265),left_hours VARCHAR(265),created_at VARCHAR(265),account_id CHAR(50),"\ "mobile VARCHAR(265),mobile2 VARCHAR(265),contact VARCHAR(265),contact2 VARCHAR(265),qq VARCHAR(265),"\ "PRIMARY KEY (`id`),UNIQUE KEY `no`(`order_no`))ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8" sql_key="CREATE UNIQUE INDEX id ON DUMPLINGS(id)" cursor = db.cursor() cursor.execute("DROP TABLE IF EXISTS DUMPLINGS") cursor.execute(sql_create)# 執行SQL語句 cursor.execute(sql_key) db.close() # 關閉資料庫連
把資料匯入Mysql,注意編碼和欄位之間的匹配。
def IntoMysql(results): mysql_host = '' mysql_db = 'zyc' mysql_user = 'zyc' mysql_password = '' mysql_port = 3306 db = pymysql.connect(host=mysql_host,charset='utf8') # 連線資料庫編碼注意是utf8,不然中文結果輸出會亂碼 cursor = db.cursor() for j in range(len(results)): try: sql = "INSERT INTO DUMPLINGS(id,order_no,order_title,publish_desc,game_name," \ "game_area,game_area_distinct,order_current,order_content,order_hours," \ "order_price,add_price,safe_money,speed_money,order_status_desc," \ "order_lock_desc,cancel_type_desc,kf_status_desc,is_show_pwd,game_pwd," \ "game_account,game_actor,left_hours,created_at,account_id," \ "mobile,mobile2,contact,contact2,qq) VALUES (" for i in range(len(results[j])): sql = sql + "'" + results[j][i] + "'," sql = sql[:-1] + ")" sql = sql.encode('utf-8') cursor.execute(sql) db.commit() except:pass db.close()
每十秒執行一次。
mysql_create() i=0 while True: results = GetResults() IntoMysql(results) i=i+1 print("爬蟲次數:",i) time.sleep(10)
結果如圖:
更多關於Python相關內容可檢視本站專題:《Python Socket程式設計技巧總結》、《Python正則表示式用法總結》、《Python資料結構與演算法教程》、《Python函式使用技巧總結》、《Python字串操作技巧彙總》、《Python+MySQL資料庫程式設計入門教程》及《Python常見資料庫操作技巧彙總》
希望本文所述對大家Python程式設計有所幫助。