Setting a proxy for a crawler

import requests

# Route the request through a proxy with the proxies keyword argument.
# The dict keys name the target URL's scheme ('http'/'https'),
# and the values are the proxy URLs.
response = requests.get('https://www.baidu.com',
                        proxies={
                            'http': 'http://114.101.18.169:65309',
                            'https': 'http://114.101.18.169:65309',
                        })

# Check whether the request succeeded.
if response.status_code == 200:
    with open('baidu.html', 'wb') as f:
        # Write line by line rather than dumping the whole body at once.
        # iter_lines() strips line endings, so add them back when writing.
        for line in response.iter_lines():
            f.write(line + b'\n')
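If the proxy is actually a SOCKS5 proxy rather than a plain HTTP proxy, requests can still be used, but it needs the PySocks extra (pip install requests[socks]), and the SOCKS scheme goes into the proxy URL, not into the dictionary key. A minimal sketch, assuming the example address above really is a SOCKS5 endpoint:

import requests

# Assumption: the example host/port speaks SOCKS5; requests needs
# the PySocks extra (requests[socks]) for socks5:// proxy URLs.
socks_proxies = {
    'http': 'socks5://114.101.18.169:65309',
    'https': 'socks5://114.101.18.169:65309',
}

response = requests.get('https://www.baidu.com', proxies=socks_proxies)
print(response.status_code)

Using socks5h:// instead of socks5:// would also route DNS resolution through the proxy.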
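Writing line by line only keeps memory low if the body is downloaded lazily; by default requests reads the entire response into memory before iter_lines() ever runs. A minimal sketch of a streaming variant, assuming the same proxy dictionary as above, that saves the page in fixed-size chunks with stream=True and iter_content():

import requests

proxies = {
    'http': 'http://114.101.18.169:65309',
    'https': 'http://114.101.18.169:65309',
}

# stream=True defers downloading the body until it is iterated,
# so only one chunk is held in memory at a time.
with requests.get('https://www.baidu.com', proxies=proxies, stream=True) as response:
    if response.status_code == 200:
        with open('baidu.html', 'wb') as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)

iter_content() is used here instead of iter_lines() because the HTML is saved verbatim, so line boundaries do not matter for a binary copy.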
Original article: https://www.cnblogs.com/python001-vip/p/12171997.html