DecryptLogin
GitHub
import scrapy
import re


class GithubSpider(scrapy.Spider):
    name = 'github'
    allowed_domains = ['github.com']
    start_urls = ['https://github.com/login']

    def parse(self, response):
        # Pull the hidden fields out of the login form
        authenticity_token = response.xpath("//input[@name='authenticity_token']/@value").extract_first()
        utf8 = response.xpath("//input[@name='utf8']/@value").extract_first()
        commit = response.xpath("//input[@name='commit']/@value").extract_first()
        # print(authenticity_token)
        # print(utf8)
        # print(commit)
        post_data = dict(
            login="*************",
            password="**********",
            authenticity_token=authenticity_token,
            utf8='✓',
            commit='Sign in',
        )
        # Submit the login form as a POST request
        yield scrapy.FormRequest(
            url='https://github.com/session',
            formdata=post_data,
            callback=self.after_login
        )

    def after_login(self, response):
        # Search the response body for a marker string to confirm the login worked
        print(re.findall("oookkkssss", response.body.decode()))
        # print(response.body.decode())
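Besides the usual `scrapy crawl github` command, the spider can also be driven from a small script. A minimal sketch, assuming the GithubSpider class above is importable (or defined in the same file):

from scrapy.crawler import CrawlerProcess

# Run the spider in-process; LOG_LEVEL is set only to keep the console output readable.
process = CrawlerProcess(settings={'LOG_LEVEL': 'INFO'})
process.crawl(GithubSpider)
process.start()  # blocks until the crawl finishes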
Reference: https://blog.csdn.net/kingx3/article/details/108396493
import scrapy
from ps import k_login, k_password


class GloginSpider(scrapy.Spider):
    name = 'github1'
    allowed_domains = ['github.com']
    start_urls = ['https://github.com/login']
    url = 'https://github.com/session'

    # def start_requests(self):

    def parse(self, response):
        commit = 'Sign in'
        authenticity_token = response.xpath('//input[@name="authenticity_token"]/@value').extract_first()
        # .extract_first() returns the first value from the selector list,
        # equivalent to .extract()[0] or [0].extract()
        # Testing showed ga_id is empty before login, so try leaving this field out
        # ga_id = response.xpath('//meta[@name="octolytics-dimension-ga_id"]/@content').extract()
        login = k_login
        password = k_password
        timestamp = response.xpath('//input[contains(@name,"timestamp")]/@value')[0].extract()
        timestamp_secret = response.xpath('//input[contains(@name,"timestamp")]/@value')[1].extract()
        # print(authenticity_token)
        # print(ga_id)
        # print(timestamp, timestamp_secret)
        # Build a dict with the form data to submit
        form_data = {
            'commit': commit,
            'authenticity_token': authenticity_token,
            # 'ga_id': ga_id,
            'login': login,
            'password': password,
            'webauthn-support': 'supported',
            'webauthn-iuvpaa-support': 'unsupported',
            'timestamp': timestamp,
            'timestamp_secret': timestamp_secret,
        }
        # scrapy.Request issues GET requests by default;
        # use scrapy.FormRequest for form submission (its default method is POST)
        yield scrapy.FormRequest(
            url=self.url,
            formdata=form_data,
            callback=self.after_login
        )

    def after_login(self, response):
        # print(response.body.decode('utf-8'))
        with open('./github.html', 'w', encoding='utf-8') as f:
            f.write(response.body.decode('utf-8'))
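This second spider reads the credentials from a local ps module instead of hardcoding them. That module is not shown in the source; a minimal sketch of what it presumably contains, with placeholder values:

# ps.py -- a minimal sketch; the real file is not part of the source.
# Keep it out of version control, since it holds the GitHub credentials.
k_login = 'your-github-username'       # placeholder, not a real account
k_password = 'your-github-password'    # placeholder, not a real password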