爬取目标:雪球网(起始url:https://xueqiu.com/hq#exchange=CN&firstName=1&secondName=1_0&page=1)
爬取内容:雪球网深沪股市情况
使用工具:requests库实现发送请求、获取响应。
json格式的动态加载数据实现数据解析、提取。
pymysql进行数据存储
思路:对该网站的动态加载数据的请求方式进行控制变量的发送请求,最终得到实际有效的参数。
项目重点:使用抓包工具分析发送数据请求到json格式的cookie数据,这是此次动态抓取的重点
直接放代码(详细说明在注释里,欢迎同行相互交流、学习~):
1 import requests
2 import json
3 import pymysql
4
class mysql_conn(object):
    """Thin wrapper around a pymysql connection for simple write queries."""

    def __init__(self):
        # Hard-coded local development credentials; adjust for other
        # environments. connect() may raise, in which case self.cursor
        # is never assigned -- __del__ below must tolerate that.
        self.db = pymysql.connect(host='127.0.0.1', user='root',
                                  password='abc123', port=3306,
                                  database='py1011')
        self.cursor = self.db.cursor()

    def execute_modify_mysql(self, sql, params=None):
        """Run a modifying statement (INSERT/UPDATE/DELETE) and commit.

        :param sql: SQL statement, optionally with %s placeholders.
        :param params: optional sequence of values for the placeholders;
            passing values here lets the driver escape them (avoids SQL
            injection). Defaults to None so existing callers that pass a
            fully-built SQL string keep working unchanged.
        """
        self.cursor.execute(sql, params)
        self.db.commit()

    def __del__(self):
        # Original closed unconditionally and raised AttributeError when
        # __init__ had failed before setting these attributes; __del__
        # must never raise, so guard each close with getattr().
        cursor = getattr(self, 'cursor', None)
        if cursor is not None:
            cursor.close()
        db = getattr(self, 'db', None)
        if db is not None:
            db.close()
18
# Scrape Xueqiu's dynamically loaded stock feed (JSON endpoint) and store
# each entry in the local MySQL `xueqiu` table.
headers = {
    # The endpoint rejects anonymous requests: a valid session cookie,
    # captured with a packet-sniffing tool, must accompany the request.
    # NOTE(review): these tokens expire -- refresh before running.
    # (Original code left this value unquoted, a SyntaxError.)
    'Cookie': 'xq_a_token=584d0cf8d5a5a9809761f2244d8d272bac729ed4; xq_r_token=98f278457fc4e1e5eb0846e36a7296e642b8138a;',
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
}

# NOTE(review): the original URL literal was truncated after "since_" and
# unterminated; the query string below is the usual shape for this
# endpoint -- confirm the parameters against a fresh browser capture.
url = ('https://xueqiu.com/v4/statuses/public_timeline_by_category.json'
       '?since_id=-1&max_id=-1&count=10&category=6')

response = requests.get(url, headers=headers)
res_dict = json.loads(response.text)

# Each feed entry wraps its payload as a JSON *string* under 'data',
# so it needs a second json.loads() pass.
for item in res_dict['list']:
    data_dict = json.loads(item['data'])

    row = (
        data_dict['id'],
        data_dict['title'],
        data_dict['description'],
        data_dict['target'],
    )
    try:
        # Parameterized query: the driver escapes the scraped (untrusted)
        # values, fixing both the injection risk of the original
        # string-built SQL and its broken "{traget}" / str.fromart() call
        # which made every insert fail silently.
        sql = ('insert into xueqiu (uid, title, description, target) '
               'values (%s, %s, %s, %s)')
        mc = mysql_conn()
        mc.cursor.execute(sql, row)
        mc.db.commit()
    except Exception:
        # Best-effort insert: skip duplicate/bad rows, keep scraping.
        pass