"""An extensible library for opening URLs using a variety of protocols
[0] Fix | Delete
[1] Fix | Delete
The simplest way to use this module is to call the urlopen function,
[2] Fix | Delete
which accepts a string containing a URL or a Request object (described
[3] Fix | Delete
below). It opens the URL and returns the results as file-like
[4] Fix | Delete
object; the returned object has some extra methods described below.
[5] Fix | Delete
[6] Fix | Delete
The OpenerDirector manages a collection of Handler objects that do
[7] Fix | Delete
all the actual work. Each Handler implements a particular protocol or
[8] Fix | Delete
option. The OpenerDirector is a composite object that invokes the
[9] Fix | Delete
Handlers needed to open the requested URL. For example, the
[10] Fix | Delete
HTTPHandler performs HTTP GET and POST requests and deals with
[11] Fix | Delete
non-error returns. The HTTPRedirectHandler automatically deals with
[12] Fix | Delete
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
[13] Fix | Delete
deals with digest authentication.
[14] Fix | Delete
[15] Fix | Delete
urlopen(url, data=None) -- Basic usage is the same as original
[16] Fix | Delete
urllib. pass the url and optionally data to post to an HTTP URL, and
[17] Fix | Delete
get a file-like object back. One difference is that you can also pass
[18] Fix | Delete
a Request instance instead of URL. Raises a URLError (subclass of
[19] Fix | Delete
OSError); for HTTP errors, raises an HTTPError, which can also be
[20] Fix | Delete
treated as a valid response.
[21] Fix | Delete
[22] Fix | Delete
build_opener -- Function that creates a new OpenerDirector instance.
[23] Fix | Delete
Will install the default handlers. Accepts one or more Handlers as
[24] Fix | Delete
arguments, either instances or Handler classes that it will
[25] Fix | Delete
instantiate. If one of the argument is a subclass of the default
[26] Fix | Delete
handler, the argument will be installed instead of the default.
[27] Fix | Delete
[28] Fix | Delete
install_opener -- Installs a new opener as the default opener.
[29] Fix | Delete
[30] Fix | Delete
objects of interest:
[31] Fix | Delete
[32] Fix | Delete
OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
[33] Fix | Delete
the Handler classes, while dealing with requests and responses.
[34] Fix | Delete
[35] Fix | Delete
Request -- An object that encapsulates the state of a request. The
[36] Fix | Delete
state can be as simple as the URL. It can also include extra HTTP
[37] Fix | Delete
headers, e.g. a User-Agent.
[38] Fix | Delete
[39] Fix | Delete
BaseHandler --
[40] Fix | Delete
[41] Fix | Delete
internals:
[42] Fix | Delete
BaseHandler and parent
[43] Fix | Delete
_call_chain conventions
[44] Fix | Delete
[45] Fix | Delete
Example usage:
[46] Fix | Delete
[47] Fix | Delete
import urllib.request
[48] Fix | Delete
[49] Fix | Delete
# set up authentication info
[50] Fix | Delete
authinfo = urllib.request.HTTPBasicAuthHandler()
[51] Fix | Delete
authinfo.add_password(realm='PDQ Application',
[52] Fix | Delete
uri='https://mahler:8092/site-updates.py',
[53] Fix | Delete
user='klem',
[54] Fix | Delete
passwd='geheim$parole')
[55] Fix | Delete
[56] Fix | Delete
proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"})
[57] Fix | Delete
[58] Fix | Delete
# build a new opener that adds authentication and caching FTP handlers
[59] Fix | Delete
opener = urllib.request.build_opener(proxy_support, authinfo,
[60] Fix | Delete
urllib.request.CacheFTPHandler)
[61] Fix | Delete
[62] Fix | Delete
# install it
[63] Fix | Delete
urllib.request.install_opener(opener)
[64] Fix | Delete
[65] Fix | Delete
f = urllib.request.urlopen('http://www.python.org/')
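
# the same request, made with an explicit Request object; the extra
# header value below is illustrative only
req = urllib.request.Request('http://www.python.org/',
                             headers={'User-Agent': 'example-client/1.0'})
f = urllib.request.urlopen(req)
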
"""

# XXX issues:
# If an authentication error handler tries to perform authentication
# but fails for some reason, how should the error be signalled?  The
# client needs to know the HTTP error code.  But if the handler knows
# that the problem was, e.g., that it didn't recognize the hash
# algorithm requested in the challenge, it would be good to pass that
# information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import bisect
import email
import hashlib
import http.client
import io
import os
import posixpath
import re
import socket
import string
import sys
import time
import tempfile
import contextlib
import warnings


from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
    urlparse, urlsplit, urljoin, unwrap, quote, unquote,
    _splittype, _splithost, _splitport, _splituser, _splitpasswd,
    _splitattr, _splitquery, _splitvalue, _splittag, _to_bytes,
    unquote_to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

__all__ = [
    # Classes
    'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler',
    'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler',
    'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm',
    'HTTPPasswordMgrWithPriorAuth', 'AbstractBasicAuthHandler',
    'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', 'AbstractDigestAuthHandler',
    'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', 'HTTPHandler',
    'FileHandler', 'FTPHandler', 'CacheFTPHandler', 'DataHandler',
    'UnknownHandler', 'HTTPErrorProcessor',
    # Functions
    'urlopen', 'install_opener', 'build_opener',
    'pathname2url', 'url2pathname', 'getproxies',
    # Legacy interface
    'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener',
]

# used in the User-Agent header sent to servers
__version__ = '%d.%d' % sys.version_info[:2]

_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            *, cafile=None, capath=None, cadefault=False, context=None):
    '''Open the URL url, which can be either a string or a Request object.

    *data* must be an object specifying additional data to be sent to
    the server, or None if no such data is needed.  See Request for
    details.

    The urllib.request module uses HTTP/1.1 and includes a
    "Connection: close" header in its HTTP requests.

    The optional *timeout* parameter specifies a timeout in seconds for
    blocking operations like the connection attempt (if not specified, the
    global default timeout setting will be used).  This only works for HTTP,
    HTTPS and FTP connections.

    If *context* is specified, it must be an ssl.SSLContext instance
    describing the various SSL options.  See HTTPSConnection for more
    details.

    The optional *cafile* and *capath* parameters specify a set of trusted CA
    certificates for HTTPS requests.  cafile should point to a single file
    containing a bundle of CA certificates, whereas capath should point to a
    directory of hashed certificate files.  More information can be found in
    ssl.SSLContext.load_verify_locations().

    The *cadefault* parameter is ignored.

    This function always returns an object which can work as a context
    manager and has methods such as

    * geturl() - return the URL of the resource retrieved, commonly used to
      determine if a redirect was followed

    * info() - return the meta-information of the page, such as headers, as
      an email.message.Message instance (see Quick Reference to HTTP Headers)

    * getcode() - return the HTTP status code of the response.  Raises URLError
      on errors.

    For HTTP and HTTPS URLs, this function returns an
    http.client.HTTPResponse object slightly modified.  In addition to the
    three new methods above, the msg attribute contains the same information
    as the reason attribute --- the reason phrase returned by the server ---
    instead of the response headers as it is specified in the documentation
    for HTTPResponse.

    For FTP, file, and data URLs and requests explicitly handled by legacy
    URLopener and FancyURLopener classes, this function returns a
    urllib.response.addinfourl object.

    Note that None may be returned if no handler handles the request (though
    the default installed global OpenerDirector uses UnknownHandler to ensure
    this never happens).

    In addition, if proxy settings are detected (for example, when a *_proxy
    environment variable like http_proxy is set), a ProxyHandler is installed
    by default and makes sure the requests are handled through the proxy.
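
    Example (a minimal sketch; the URL and timeout value are illustrative
    only):

        import urllib.request

        req = urllib.request.Request('http://www.python.org/')
        with urllib.request.urlopen(req, timeout=10) as response:
            body = response.read()
            status = response.getcode()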

    '''
    global _opener
    if cafile or capath or cadefault:
        import warnings
        warnings.warn("cafile, capath and cadefault are deprecated, use a "
                      "custom context instead.", DeprecationWarning, 2)
        if context is not None:
            raise ValueError(
                "You can't pass both context and any of cafile, capath, and "
                "cadefault"
            )
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
                                             cafile=cafile,
                                             capath=capath)
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif context:
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)

def install_opener(opener):
    """Install an OpenerDirector instance as the default global opener."""
    global _opener
    _opener = opener

_url_tempfiles = []
def urlretrieve(url, filename=None, reporthook=None, data=None):
    """
    Retrieve a URL into a temporary location on disk.

    Requires a URL argument. If a filename is passed, it is used as
    the temporary file location. The reporthook argument should be
    a callable that accepts a block number, a read size, and the
    total file size of the URL target. The data argument should be
    valid URL encoded data.

    If a filename is passed and the URL points to a local resource,
    the result is a copy from local file to new file.

    Returns a tuple containing the path to the newly created
    data file as well as the resulting HTTPMessage object.
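
    Example reporthook (a sketch; the URL and target path are
    illustrative only):

        def report(block_num, block_size, total_size):
            if total_size > 0:
                print("got", min(block_num * block_size, total_size),
                      "of", total_size, "bytes")

        urlretrieve('http://www.python.org/', '/tmp/index.html', report)
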
    """
    url_type, path = _splittype(url)

    with contextlib.closing(urlopen(url, data)) as fp:
        headers = fp.info()

        # Just return the local path and the "headers" for file://
        # URLs. No sense in performing a copy unless requested.
        if url_type == "file" and not filename:
            return os.path.normpath(path), headers

        # Handle temporary file setup.
        if filename:
            tfp = open(filename, 'wb')
        else:
            tfp = tempfile.NamedTemporaryFile(delete=False)
            filename = tfp.name
            _url_tempfiles.append(filename)

        with tfp:
            result = filename, headers
            bs = 1024*8
            size = -1
            read = 0
            blocknum = 0
            if "content-length" in headers:
                size = int(headers["Content-Length"])

            if reporthook:
                reporthook(blocknum, bs, size)

            while True:
                block = fp.read(bs)
                if not block:
                    break
                read += len(block)
                tfp.write(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, bs, size)

    if size >= 0 and read < size:
        raise ContentTooShortError(
            "retrieval incomplete: got only %i out of %i bytes"
            % (read, size), result)

    return result

def urlcleanup():
    """Clean up temporary files from urlretrieve calls."""
    for temp_file in _url_tempfiles:
        try:
            os.unlink(temp_file)
        except OSError:
            pass

    del _url_tempfiles[:]
    global _opener
    if _opener:
        _opener = None

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False,
                 method=None):
        self.full_url = url
        self.headers = {}
        self.unredirected_hdrs = {}
        self._data = None
        self.data = data
        self._tunnel_host = None
        for key, value in headers.items():
            self.add_header(key, value)
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        if method:
            self.method = method

    @property
    def full_url(self):
        if self.fragment:
            return '{}#{}'.format(self._full_url, self.fragment)
        return self._full_url

    @full_url.setter
    def full_url(self, url):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self._full_url = unwrap(url)
        self._full_url, self.fragment = _splittag(self._full_url)
        self._parse()

    @full_url.deleter
    def full_url(self):
        self._full_url = None
        self.fragment = None
        self.selector = ''

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        if data != self._data:
            self._data = data
            # issue 16464
            # if we change data we need to remove the content-length header
            # (because it was most probably calculated for the previous value)
            if self.has_header("Content-length"):
                self.remove_header("Content-length")

    @data.deleter
    def data(self):
        self.data = None

    def _parse(self):
        self.type, rest = _splittype(self._full_url)
        if self.type is None:
            raise ValueError("unknown url type: %r" % self.full_url)
        self.host, self.selector = _splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        """Return a string indicating the HTTP request method."""
        default_method = "POST" if self.data is not None else "GET"
        return getattr(self, 'method', default_method)

    def get_full_url(self):
        return self.full_url

    def set_proxy(self, host, type):
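        # an https request is tunnelled through the proxy with CONNECT, so
        # keep the real host around for the tunnel instead of rewriting
        # the request type and selector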
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def remove_header(self, header_name):
        self.headers.pop(header_name, None)
        self.unredirected_hdrs.pop(header_name, None)

    def header_items(self):
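        # regular headers (self.headers) take precedence over unredirected
        # ones of the same name in the merge below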
        hdrs = {**self.unredirected_hdrs, **self.headers}
        return list(hdrs.items())


class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
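        # handler methods follow a "<protocol>_<condition>" naming scheme,
        # e.g. "http_open", "http_error_302" or "https_request"; each one
        # found below is registered in the matching lookup table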
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result