Daily Summary

  Last time the code was not fully polished, so today I worked through the remaining details, resolved the problems I ran into, and finished the implementation. Its main function is to crawl the data and save it into a database table.

The code is as follows:

import json
import pymysql
import requests
from time import sleep

def A():
    try:
        # NetEase (163.com) epidemic data endpoint; the t query parameter is a cache buster
        url = "https://c.m.163.com/ug/api/wuhan/app/data/list-total?t=316765403234"
        # Most of these headers are leftovers copied from browser dev tools for a
        # different (ArcGIS) dashboard; the User-Agent is the one that matters here
        header = {
            'origin': 'https://gisanddata.maps.arcgis.com',
            'referer': 'https://gisanddata.maps.arcgis.com/apps/opsdashboard/index.html',
            'sec-fetch-site': 'same-site',
            'sec-fetch-mode': 'cors',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36'
        }
        res = requests.get(url, headers=header)
        res = json.loads(res.text)['data']
        print(res.keys())
        # areaTree is a list of countries/regions; index 2 was China when this was
        # written, and its children are the provinces
        num = res['areaTree'][2]['children']
        print(len(num))

        # Recreate the destination table (fill in passwd/database for your MySQL setup)
        db = pymysql.connect(host="localhost", user="root", passwd="", database="", charset='utf8')
        cursor = db.cursor()
        cursor.execute('DROP TABLE IF EXISTS info')
        sql = """CREATE TABLE info (
            province varchar(25),
            T_confirm varchar(25),
            T_suspect varchar(25),
            T_heal varchar(25),
            T_dead varchar(25),
            T_severe varchar(25),
            T_storeConfirm varchar(25),
            S_confirm varchar(25),
            S_suspect varchar(25),
            S_heal varchar(25),
            S_dead varchar(25),
            S_severe varchar(25),
            name varchar(25),
            id varchar(25))"""
        cursor.execute(sql)
        id = 0
        # One row per city: T_* columns hold today's numbers, S_* the cumulative totals
        for sheng in num:
            print(sheng['name'])
            for data in sheng['children']:
                id = id + 1
                l = [
                    sheng['name'],                    # province
                    data['today']['confirm'],
                    data['today']['suspect'],
                    data['today']['heal'],
                    data['today']['dead'],
                    data['today']['severe'],
                    data['today']['storeConfirm'],
                    data['total']['confirm'],
                    data['total']['suspect'],
                    data['total']['heal'],
                    data['total']['dead'],
                    data['total']['severe'],
                    data['name'],                     # city name
                    id,
                ]
                sql = ("insert into info (province,T_confirm,T_suspect,T_heal,T_dead,T_severe,"
                       "T_storeConfirm,S_confirm,S_suspect,S_heal,S_dead,S_severe,name,id) "
                       "values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
                cursor.execute(sql, l)
                db.commit()

        # Also walk the children of the first areaTree entry and print each
        # region's cumulative confirmed count
        for i in res['areaTree'][0]['children']:
            sleep(1)  # pause briefly between iterations
            T_confirm = i['total']['confirm']
            print(i['name'], T_confirm)

    except Exception as e:
        print("error:", e)

A()
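
For reference, the shape of json.loads(res.text)['data'] that this script relies on looks roughly like the sketch below. The field names are taken from the parsing code above; the live response carries more fields than these, and the index of China inside areaTree is not guaranteed:

# Rough sketch of the structure the scraper reads, inferred from the code above;
# real responses include additional fields.
data = {
    "areaTree": [            # one entry per country/region
        {
            "name": "...",
            "today": {"confirm": 0, "suspect": 0, "heal": 0,
                      "dead": 0, "severe": 0, "storeConfirm": 0},
            "total": {"confirm": 0, "suspect": 0, "heal": 0,
                      "dead": 0, "severe": 0},
            "children": [    # provinces, each of which nests city-level children
                # ... same shape, one level down ...
            ],
        },
        # ...
    ],
}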

This code crawls the data, parses the result, and saves it into a database table, which makes subsequent processing easier.
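
Once the script has run, the saved rows can be read back with a plain SELECT. A minimal sketch, assuming the same local MySQL placeholder credentials as above (the province value 湖北 is just an example):

import pymysql

# Same placeholder connection settings as the scraper above
db = pymysql.connect(host="localhost", user="root", passwd="", database="", charset='utf8')
cursor = db.cursor()
# Cumulative confirmed count per city for one example province
cursor.execute("select name, S_confirm from info where province = %s", ("湖北",))
for name, s_confirm in cursor.fetchall():
    print(name, s_confirm)
db.close()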

Original post: https://www.cnblogs.com/ruangongwangxiansheng/p/14909109.html