
Python Crawlers: How to Set a Proxy IP and Masquerade as a Browser


1. Masquerading as a browser in a Python crawler

# Import the urllib.request module
import urllib.request

# Set the request header
headers = ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
# Create an opener
opener = urllib.request.build_opener()
# Add the headers to the opener
opener.addheaders = [headers]
# Install the opener globally
urllib.request.install_opener(opener)
# Open the page with urlopen
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
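To confirm the spoofed User-Agent actually goes out on the wire, it helps to hit an echo service. A minimal sketch, assuming httpbin.org is reachable from your network (the UA string here is a shortened placeholder):

import urllib.request

opener = urllib.request.build_opener()
opener.addheaders = [("User-Agent", "Mozilla/5.0 (test)")]  # placeholder UA
urllib.request.install_opener(opener)

# httpbin.org echoes back the User-Agent it received, so the response
# body should contain the string set above.
print(urllib.request.urlopen("http://httpbin.org/user-agent").read().decode("utf-8"))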

2. Setting a proxy

# Define the proxy IP
proxy_addr = "122.241.72.191:808"
# Set the proxy
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
# Create an opener
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
# Install the opener globally
urllib.request.install_opener(opener)
# Open the page with urlopen
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
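A quick way to verify the proxy is actually in effect is to ask an echo service which IP the request arrived from. A sketch, again assuming httpbin.org is reachable; the proxy address below is the sample from this article and has almost certainly expired, so substitute a live one:

import urllib.request

proxy = urllib.request.ProxyHandler({'http': '122.241.72.191:808'})  # stale sample proxy
opener = urllib.request.build_opener(proxy)
urllib.request.install_opener(opener)

# httpbin.org/ip reports the origin IP; with a working proxy this should
# print the proxy's address rather than your own.
print(urllib.request.urlopen("http://httpbin.org/ip").read().decode("utf-8"))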

3. Setting a proxy and masquerading as a browser at the same time

# Define the proxy IP
proxy_addr = "122.241.72.191:808"
# Create a request
req = urllib.request.Request(url)
# Add headers
req.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
# Set the proxy
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
# Create an opener
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
# Install the opener globally
urllib.request.install_opener(opener)
# Open the page with urlopen
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')
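Note that install_opener() changes the process-wide default that every later urlopen() call picks up. If you would rather keep the proxy scoped to a single request, you can call the opener directly instead of installing it; a sketch with a hypothetical target URL:

import urllib.request

url = "http://example.com/"  # hypothetical target
proxy = urllib.request.ProxyHandler({'http': '122.241.72.191:808'})
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)

req = urllib.request.Request(url)
req.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")

# opener.open() routes only this request through the proxy, leaving the
# global urlopen() behavior untouched.
data = opener.open(req).read().decode('utf-8', 'ignore')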

4. Adding multiple fields to the request headers

import urllib.request

page_headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0",
    "Host": "www.baidu.com",
    "Cookie": "xxxxxxxx"
}
req = urllib.request.Request(url, headers=page_headers)
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')
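If you prefer to build the request first and attach fields as you go, Request also supports adding headers one at a time, and header_items() lets you inspect what will be sent. A small sketch with illustrative values:

import urllib.request

req = urllib.request.Request("http://www.baidu.com/")
req.add_header("User-Agent", "Mozilla/5.0 (test)")  # placeholder UA
req.add_header("Referer", "http://www.baidu.com/")  # illustrative value

# header_items() returns the (name, value) pairs queued for this request.
print(req.header_items())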

5. Adding POST request parameters

import urllib.request
import urllib.parse

# Set the POST parameters
page_data = urllib.parse.urlencode([
    ('pn', page_num),
    ('kd', keywords)
])
# Set the headers
page_headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0',
    'Connection': 'keep-alive',
    'Host': 'www.lagou.com',
    'Origin': 'https://www.lagou.com',
    'Cookie': 'JSESSIONID=ABAAABAABEEAAJA8F28C00A88DC4D771796BB5C6FFA2DDA; user_trace_token=20170715131136-d58c1f22f6434e9992fc0b35819a572b',
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Referer': 'https://www.lagou.com/jobs/list_%E6%95%B0%E6%8D%AE%E6%8C%96%E6%8E%98?labelWords=&fromSearch=true&suginput=',
    'X-Anit-Forge-Token': 'None',
    'X-Requested-With': 'XMLHttpRequest'
}
# Open the page
req = urllib.request.Request(url, headers=page_headers)
data = urllib.request.urlopen(req, data=page_data.encode('utf-8')).read().decode('utf-8')
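The mechanics worth noticing above: urlencode() produces a str, urlopen() only accepts bytes for data=, and supplying data= is what turns the request into a POST. A stripped-down sketch against httpbin.org (the pn/kd fields are placeholders mirroring the example):

import urllib.parse
import urllib.request

# urlencode() returns a str; urlopen() needs bytes, hence .encode().
form = urllib.parse.urlencode({'pn': 1, 'kd': 'python'}).encode('utf-8')

# Passing data= makes urlopen() issue a POST instead of a GET.
req = urllib.request.Request("http://httpbin.org/post")
print(urllib.request.urlopen(req, data=form).read().decode('utf-8'))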

6. Using PhantomJS to simulate a browser request

# 1. Download PhantomJS, install it locally, and add it to your PATH
from selenium import webdriver

bs = webdriver.PhantomJS()
# Open the url
bs.get(url)
# Get the page source
url_data = bs.page_source
# Save the rendered page as an image
bs.get_screenshot_as_file(filename)
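One practical note: PhantomJS runs as a separate background process, so it is worth shutting it down explicitly once you are done. A usage sketch with placeholder url and filename:

from selenium import webdriver

bs = webdriver.PhantomJS()
try:
    bs.get("http://www.baidu.com/")        # placeholder URL
    html = bs.page_source                  # HTML after JavaScript has run
    bs.get_screenshot_as_file("page.png")  # placeholder filename
finally:
    bs.quit()  # terminates the phantomjs process; otherwise it lingers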

7. Setting the user-agent and cookies in PhantomJS

from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")
bs = webdriver.PhantomJS(desired_capabilities=dcap)
bs.get(url)
# Delete all cookies
bs.delete_all_cookies()
# Set a cookie
# Cookie format: inspect a cookie in your browser; it needs the
# following fields: domain, name, value, path
cookie = {
    'domain': '.www.baidu.com',  # note the leading dot
    'name': 'xxxx',
    'value': 'xxxx',
    'path': 'xxxx'
}
# Add the cookie to PhantomJS
bs.add_cookie(cookie)
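One caveat: with most Selenium drivers, add_cookie() only accepts cookies for the domain currently loaded, so navigate to the site first, add the cookie, then reload so the server actually receives it. A sketch with a hypothetical cookie name and value:

bs.get("http://www.baidu.com/")  # must be on the matching domain first
bs.add_cookie({'name': 'token', 'value': 'xxxx', 'path': '/'})  # hypothetical cookie
bs.get("http://www.baidu.com/")  # reload: this request now carries the cookie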

8. Using a WebDriver tool

# 1. Download a WebDriver binary (e.g. chromedriver.exe) matching your browser version
# 2. Put chromedriver.exe somewhere on disk, e.g. C:/chromedriver.exe
from selenium import webdriver

driver = webdriver.Chrome(executable_path="C:/chromedriver.exe")
# Open the url
driver.get(url)
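If you are on Selenium 4 or newer, note that the executable_path argument was removed; the driver path is passed through a Service object instead. A sketch assuming the same chromedriver location:

from selenium import webdriver
from selenium.webdriver.chrome.service import Service

# Selenium 4 style: the driver path goes through a Service object.
service = Service(executable_path="C:/chromedriver.exe")
driver = webdriver.Chrome(service=service)
driver.get("http://www.baidu.com/")  # placeholder URL
driver.quit()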

That is everything in this guide to setting a proxy IP and masquerading as a browser in a Python crawler. We hope it gives you a useful reference, and we hope you will continue to support VEVB武林网.

