Web scraping: auto-login to GitHub and Chouti

Auto-login to GitHub and open the user profile page

Code

#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = "wuxiaoyu"
# Date: 2017/10/15


import requests
from bs4 import BeautifulSoup


# 1. Fetch the CSRF token and cookies from the login page
r1 = requests.get(url='https://github.com/login')
s1 = BeautifulSoup(r1.text, 'html.parser')
val = s1.find(attrs={'name': 'authenticity_token'}).get('value')
# cookies set by the login page
r1_cookie_dict = r1.cookies.get_dict()



# 2. Submit the login credentials
r2 = requests.post(
    url='https://github.com/session',
    data={
        'commit': 'Sign in',
        'utf8': '',
        'authenticity_token': val,
        'login': 'your-username-here',
        'password': 'your-password-here',

    },
    cookies=r1_cookie_dict
)
r2_cookie_dict = r2.cookies.get_dict()  # may be an empty dict {}


all_cookie_dict = {}
all_cookie_dict.update(r2_cookie_dict)
all_cookie_dict.update(r1_cookie_dict)
# Note: if both responses set the same cookie, the later update() overwrites the earlier one;
# if the wrong value ends up being kept, simply swap the order of the two update() calls.


r3 = requests.get(
    url='https://github.com/Tenderrain',  # your own profile page
    cookies=all_cookie_dict
)

print(r3.text)  # this page is only fully visible after a successful login
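
The same flow can be written more compactly with requests.Session(), which stores and forwards cookies across requests automatically, so the manual cookie-dict merging above is unnecessary. A minimal sketch, using the same placeholder username/password and profile URL as above:

import requests
from bs4 import BeautifulSoup

session = requests.Session()

# Step 1: the Session records the cookies set by the login page
login_page = session.get('https://github.com/login')
soup = BeautifulSoup(login_page.text, 'html.parser')
token = soup.find(attrs={'name': 'authenticity_token'}).get('value')

# Step 2: the Session sends those cookies back and stores any new ones
session.post(
    url='https://github.com/session',
    data={
        'commit': 'Sign in',
        'utf8': '',
        'authenticity_token': token,
        'login': 'your-username-here',
        'password': 'your-password-here',
    },
)

# Step 3: the profile page is fetched with the accumulated session cookies
profile = session.get('https://github.com/Tenderrain')
print(profile.text)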

Auto-login to the Chouti forum

Code

"""
1.
    r0 = requests.get(url='http://dig.chouti.com/')
    r0_cookie_dict = r0.cookies.get_dict()


2.
    r1 = requests.post(
        url="http://dig.chouti.com/login",
        data={
            'phone':'xxx',
            'password':'xx',
            'oneMonth':1
        },
        cookies=r0_cookie_dict
    )

    print(r1.text)

    r1_cookie_dict = r1.cookies.get_dict()



3.
    r2 = requests.post(url='http://dig.chouti.com/link/vote?linksId=14708366',cookies=r0_cookie_dict)
    print(r2.text)

Exercise: log in automatically and upvote a post.

"""
Original post: https://www.cnblogs.com/robinunix/p/7704050.html