urllib.request
The urllib.request module defines functions and classes for opening URLs.
urllib.request.urlopen(url, data=None, [timeout, ]*, cafile=None, capath=None, cadefault=False, context=None)
- url: either a URL string or a Request object
- data: additional data to send when accessing the URL
- timeout: timeout setting for the request, in seconds
# -*- coding: UTF-8 -*-
import urllib.request                                   # import the module
response = urllib.request.urlopen('https://baidu.com')  # open the page
print(response.read())                                  # print the content
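The example above does not use timeout; below is a minimal sketch, assuming a 5-second value, that passes it and inspects the response object returned by urlopen:
import urllib.request

# the 5-second timeout is an assumed example value
response = urllib.request.urlopen('https://baidu.com', timeout=5)
print(response.getcode())                   # HTTP status code, e.g. 200
print(response.getheader('Content-Type'))   # a single response header
print(response.read()[:200])                # first 200 bytes of the body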
Constructing a Request instance to make the request
class urllib.request.Request(url, data=None, headers={}, origin_req_host=None, unverifiable=False, method=None)
# -*- coding: UTF-8 -*-
import urllib.request
request = urllib.request.Request('https://baidu.com')
response = urllib.request.urlopen(request)
print(response.read())
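Besides the url, the Request constructor also accepts data, headers and method keyword arguments; a minimal sketch (the User-Agent value is just an illustrative string):
import urllib.request

request = urllib.request.Request(
    'https://baidu.com',
    headers={'User-Agent': 'Mozilla/5.0'},   # headers set at construction time
    method='GET'                             # HTTP method stated explicitly
)
response = urllib.request.urlopen(request)
print(response.read())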
Passing data with GET
http://www.guancha.cn/Search/?k=一帶一路
This is the URL the browser shows when searching guancha.cn (觀察者網(wǎng)) for 一帶一路 (Belt and Road).
# -*- coding: UTF-8 -*-
import urllib.request
import urllib.parse                 # needed for urlencode
values = {}
values['k'] = '一帶一路'
data = urllib.parse.urlencode(values)
print(data)                         # k=%E4%B8%80%E5%B8%A6%E4%B8%80%E8%B7%AF
url = 'http://www.guancha.cn/Search/?' + data
request = urllib.request.Request(url)
response = urllib.request.urlopen(request)
con = open('g.html', 'wb')
co = response.read()
con.write(co)
con.close()
###
The content of g.html is then:
<!DOCTYPE html>
<html lang="zh-cmn-Hans">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta http-equiv="content-language" content="zh-CN">
<title>搜索結果頁</title>
<link rel="stylesheet" type="text/css" href="../css/public.css">
<link rel="stylesheet" type="text/css" href="../css/main.css">
<meta name="Description" content="觀察者網(wǎng),致力于薈萃中外思想者精華,鼓勵青年學人探索,建中西文化交流平臺,為崛起中的精英提供決策參考。 " />
<meta name="Keywords" content="觀察者網(wǎng),觀察者,春秋綜合研究院,新聞,新媒體,觀察,中國模式,政治,軍事,歷史,評論" />
<title>觀察者網(wǎng)-中國關懷 全球視野</title> <link rel="shortcut icon" />
<script type="text/javascript" src="../js/jquery-1.8.2.min.js"></script>
<script type="text/javascript" src="../js/jquery.pagination.js"></script>
</head>
<body>
<div class="header">
......
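response.read() returns bytes, which is why the file above is opened in 'wb' mode; to work with the page as text instead, here is a minimal sketch that decodes using the charset reported in the response headers (falling back to UTF-8 as an assumption):
# -*- coding: UTF-8 -*-
import urllib.request
import urllib.parse

data = urllib.parse.urlencode({'k': '一帶一路'})
response = urllib.request.urlopen('http://www.guancha.cn/Search/?' + data)
charset = response.headers.get_content_charset() or 'utf-8'  # fall back to utf-8 if not declared
html = response.read().decode(charset)
print(html[:100])    # first 100 characters of the decoded page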
POST requests
Passing parameters uses the data argument described above (data must be a bytes object).
import urllib.request, urllib.parse
url = 'http://www.xxx.com'
postdata = urllib.parse.urlencode({
    'name': 'diyinqianchang',
    'pass': '88888'
}).encode('UTF-8')                  # note: the data sent must be bytes
req = urllib.request.Request(url, postdata)
data = urllib.request.urlopen(req).read()
print(data)
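When data is supplied, urlopen sends a POST automatically; a minimal sketch that makes this explicit with the method argument and checks it with get_method() (the URL and fields are placeholders as above):
import urllib.request, urllib.parse

url = 'http://www.xxx.com'        # placeholder URL
postdata = urllib.parse.urlencode({'name': 'diyinqianchang', 'pass': '88888'}).encode('UTF-8')
req = urllib.request.Request(url, data=postdata, method='POST')  # method stated explicitly
print(req.get_method())           # -> 'POST'
data = urllib.request.urlopen(req).read()
print(data)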
Setting headers
import urllib.request, urllib.parse
url = 'http://www.zhihu.com/#signin'
postdata = urllib.parse.urlencode({
    'username': '188****8091',
    'password': '88888888'
}).encode('UTF-8')
req = urllib.request.Request(url, postdata)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36')  # add the User-Agent
req.add_header('Referer', 'https://www.zhihu.com/')  # add Referer (used by anti-hotlinking checks)
data = urllib.request.urlopen(req).read()
con = open('zhihu.html', 'wb')
con.write(data)
con.close()
print(data)
Another way to set headers
headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'
}
data = urllib.parse.urlencode(values)
request = urllib.request.Request(url, data.encode('utf-8'), headers)
# pass the headers dict directly as the third argument of Request
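Either way, the headers attached to a Request can be inspected before sending; a minimal sketch using header_items() with the zhihu URL from above:
import urllib.request

req = urllib.request.Request('http://www.zhihu.com/#signin',
                             headers={'User-Agent': 'Mozilla/5.0'})
req.add_header('Referer', 'https://www.zhihu.com/')
print(req.header_items())   # all headers set on the request; urllib may normalize name capitalization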
Setting up a proxy server
# coding: UTF-8
import urllib.request
"""
If proxies is given, it must be a dictionary mapping protocol names to URLs of proxies
"""
def use_proxy(proxy_addr, url):
    proxy = urllib.request.ProxyHandler({'http': proxy_addr})
    opener = urllib.request.build_opener(proxy)
    urllib.request.install_opener(opener)   # make urlopen use this opener globally
    data = urllib.request.urlopen(url).read().decode('utf-8')
    return data
proxy_addr = '119.**.**.60:7777'
data = use_proxy(proxy_addr, 'https://www.baidu.com')
print(len(data))
###
227
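install_opener() replaces the opener used globally by urlopen(); if you would rather not change global state, the opener returned by build_opener() can be used directly, as in this sketch (the proxy address is a placeholder):
# coding: UTF-8
import urllib.request

def use_proxy(proxy_addr, url):
    proxy = urllib.request.ProxyHandler({'http': proxy_addr})
    opener = urllib.request.build_opener(proxy)
    # call the opener directly instead of installing it globally
    return opener.open(url).read().decode('utf-8')

data = use_proxy('119.**.**.60:7777', 'http://www.baidu.com')  # placeholder proxy address
print(len(data))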
URLError
# coding: UTF-8
"""
Two classes handle network errors: URLError and HTTPError. HTTPError is a subclass of URLError,
and URLError covers a wider range of exceptions: failure to connect to the server, a remote URL
that does not exist, no network connection, and HTTPError itself.
"""
import urllib.request
import urllib.error
try:
    urllib.request.urlopen('http://blog.baiduss.net')
except urllib.error.HTTPError as e:
    print(e.code)
    print(e.reason)
except urllib.error.URLError as e:
    print(e.reason)
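Because HTTPError is a subclass of URLError, a single except clause can also handle both by checking whether the exception carries an HTTP status code; a minimal sketch:
# coding: UTF-8
import urllib.request
import urllib.error

try:
    urllib.request.urlopen('http://blog.baiduss.net')
except urllib.error.URLError as e:
    if hasattr(e, 'code'):     # only HTTPError has a status code
        print(e.code)
    print(e.reason)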