Python: fetch only the response headers, not the entity body.


Python: just get the response headers, not the content.
1. Use HEAD method
>>> import requests
>>> res = requests.head("http://www.baidu.com/")
>>> res.headers
{'Content-Encoding': 'gzip', 'Server': 'bfe/1.0.8.18', 'Last-Modified': 'Mon, 13 Jun 2016 02:50:08 GMT', 'Connection': 'Keep-Alive', 'Pragma': 'no-cache', 'Cache-Control': 'private, no-cache, no-store, proxy-revalidate, no-transform', 'Date': 'Fri, 13 Oct 2017 04:36:20 GMT', 'Content-Type': 'text/html'}
>>> res.ok
True
>>> res.content
''
# Simple, but not every site allows HEAD; some servers reject it outright.
# Example of a server that refuses HEAD:
>>> res = requests.head("https://www.douban.com/subject/1/")
>>> res
<Response [403]>
>>> res.ok
False
>>> res.content
''
>>> res.headers
{'Content-Encoding': 'gzip', 'Keep-Alive': 'timeout=30', 'Server': 'dae', 'Connection': 'keep-alive', 'Date': 'Fri, 13 Oct 2017 04:39:00 GMT', 'Content-Type': 'text/html'}
This approach is not universally reliable: some servers do not support, or refuse, the HEAD method.
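If a server refuses HEAD, a streamed GET is another option: with stream=True, requests returns as soon as the response headers have arrived, and the body is not downloaded until .content or iter_content() is actually consumed. A minimal sketch along those lines, reusing the douban URL from the example above:

import requests

# stream=True: return once the status line and headers have been read;
# the body stays on the connection until it is explicitly consumed.
res = requests.get("https://www.douban.com/subject/1/", stream=True)
print(res.status_code)
print(res.headers)
res.close()  # close without touching res.content, so the body is never downloaded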
2. Use urllib
>>> import urllib
>>> res = urllib.urlopen("http://127.0.0.1:8000/git.exe")
>>> res.url
'http://127.0.0.1:8000/git.exe'
>>> res.headers.headers
['Server: SimpleHTTP/0.6 Python/2.7.10\r\n',
 'Date: Fri, 13 Oct 2017 06:06:37 GMT\r\n',
 'Content-type: application/x-msdownload\r\n',
 'Content-Length: 7569408\r\n',
 'Last-Modified: Fri, 16 Dec 2016 07:09:32 GMT\r\n']
>>> len(res.read())
7569408
# urllib only pulls data from the web server when read/readline/readlines is called.
# Abbreviated trace of the urllib/httplib source (Python 2):

# urllib.py
def urlopen(url, ...):
    opener = FancyURLopener()
    return opener.open(url)

class FancyURLopener(URLopener).open():
    getattr(self, name)(url)

class URLopener.open_http():
    errcode, errmsg, headers = h.getreply()
    if (200 <= errcode < 300):
        return addinfourl(fp, headers, "http:" + url, errcode)
    else:
        if data is None:
            return self.http_error(url, fp, errcode, errmsg, headers)
        else:
            return self.http_error(url, fp, errcode, errmsg, headers, data)

class URLopener.http_error():
    return method(url, fp, errcode, errmsg, headers)

class FancyURLopener.http_error_default():
    return addinfourl(fp, headers, "http:" + url, errcode)

class addinfourl(addbase):
    # only wraps fp; it does nothing with the socket itself

class addbase.__init__():
    self.fp = fp
    self.read = self.fp.read
    self.readline = self.fp.readline
    if hasattr(self.fp, "readlines"):
        self.readlines = self.fp.readlines
    self.fileno = self.fp.fileno
    # ...
As you can see, urllib.urlopen ultimately returns an addinfourl (a subclass of addbase). addbase does no work on the socket at all; nothing is read or written at that point. Data is only read from the web server later, when read/readline/readlines is called.
Figure 1. Initializing the network connection (Init Network)
Figure 2. After urlopen(): Get Header
Figure 3. After read(): Get Content
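Given this lazy-read behaviour, a headers-only fetch with urllib boils down to opening the URL and then closing it without ever calling read(). A minimal sketch (Python 2 urllib, reusing the local test URL from above):

import urllib

res = urllib.urlopen("http://127.0.0.1:8000/git.exe")
print(res.getcode())  # status code, available as soon as urlopen() returns
print(res.info())     # parsed response headers (a mimetools.Message)
res.close()           # close without read(): the 7 MB body is never read into the program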
3. Use socket
Having seen how urllib works, we can write a small function with socket that fetches only the headers.
import socket
import ssl


_timeout = 10
socket.setdefaulttimeout(_timeout)

def get_header(host, port=80, uri="/", method="GET", user_ssl=False):
    # Build the request by hand and read from the socket only until the
    # end of the headers (the blank line), then return just the headers.
    conn = None
    header = ("%s %s HTTP/1.1\r\n"
              "Host: %s\r\n"
              "Connection: close\r\n"
              "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) "
              "AppleWebKit/537.36 (KHTML, like Gecko) "
              "Chrome/61.0.3163.100 Safari/537.36\r\n"
              "\r\n") % (method, uri, host)
    if user_ssl:
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        conn = ssl_context.wrap_socket(_socket, server_hostname=host)
        conn.connect((host, port))
        conn.send(header)
    else:
        conn = socket.create_connection((host, port), _timeout)
        conn.sendall(header)
    text = ""
    while True:
        if "\r\n\r\n" in text:
            break
        buff = conn.recv(10)
        if not buff:  # connection closed before the header/body separator arrived
            break
        text += buff
        # print buff
    conn.close()
    return text.split("\r\n\r\n")[0]


if __name__ == '__main__':
    print get_header("www.douban.com", uri="/subject/27076001/")
    print
    print get_header("www.douban.com", uri="/subject/27076001/", port=443, user_ssl=True)
$ python test_header.py
HTTP/1.1 301 Moved Permanently
Date: Fri, 13 Oct 2017 06:48:23 GMT
Content-Type: text/html
Content-Length: 178
Connection: close
Location: https://www.douban.com/subject/27076001/
Server: dae

HTTP/1.1 302 Moved Temporarily
Server: ADSSERVER/45863
Date: Fri, 13 Oct 2017 06:48:23 GMT
Content-Type: text/html
Transfer-Encoding: chunked
Connection: close
Location: https://sec.douban.com/b?r=https%3A%2F%2Fwww.douban.com%2Fsubject%2F27076001%2F
Strict-Transport-Security: max-age=15552000;
Set-Cookie: __ads_session=uY8l3pLW/AjCKJ8Y4wA=; domain=.douban.com; path=/
X-Powered-By-ADS: uni-jnads-1-02
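For completeness, the same headers-only behaviour is also available one level above raw sockets, via httplib (http.client in Python 3): getresponse() parses only the status line and headers, and the body stays unread unless read() is called. A minimal sketch:

import httplib  # http.client in Python 3

conn = httplib.HTTPSConnection("www.douban.com", timeout=10)
conn.request("GET", "/subject/27076001/")
resp = conn.getresponse()   # reads only the status line and headers
print(resp.status)
print(resp.reason)
print(resp.getheaders())    # list of (name, value) tuples
conn.close()                # the body is never read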
References
<https://stackoverflow.com/questions/32062925/python-socket-server-handle-https-request>