self = <urllib.request.HTTPHandler object at 0x7ffaa8dfc4a8>
http_class = <class 'http.client.HTTPConnection'>
req = <urllib.request.Request object at 0x7ffaa46c0f98>, http_conn_args = {}
host = 'indexes', h = <http.client.HTTPConnection object at 0x7ffaa46c0438>
def do_open(self, http_class, req, **http_conn_args):
"""Return an HTTPResponse object for the request, using http_class.
http_class must implement the HTTPConnection API from http.client.
"""
host = req.host
if not host:
raise URLError('no host given')
# will parse host:port
h = http_class(host, timeout=req.timeout, **http_conn_args)
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
headers.update({k: v for k, v in req.headers.items()
if k not in headers})
# TODO(jhylton): Should this be redesigned to handle
# persistent connections?
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = {name.title(): val for name, val in headers.items()}
if req._tunnel_host:
tunnel_headers = {}
proxy_auth_hdr = "Proxy-Authorization"
if proxy_auth_hdr in headers:
tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
# Proxy-Authorization should not be sent to origin
# server.
del headers[proxy_auth_hdr]
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
try:
h.request(req.get_method(), req.selector, req.data, headers,
> encode_chunked=req.has_header('Transfer-encoding'))
/usr/lib/python3.7/urllib/request.py:1324:
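
The locals above show how do_open assembles the final header set: unredirected headers take precedence, a Connection: close header is forced so the single-shot addinfourl response can drain the socket, and names are title-cased. A standalone sketch of that merge (the sample values are illustrative, not taken from the failing request):

# Sketch of the header merge performed by do_open above.
unredirected_hdrs = {"Host": "indexes"}
req_headers = {"User-agent": "Python-urllib/3.7", "Connection": "keep-alive"}

headers = dict(unredirected_hdrs)
headers.update({k: v for k, v in req_headers.items() if k not in headers})
headers["Connection"] = "close"      # force a non-persistent connection
headers = {name.title(): val for name, val in headers.items()}
print(headers)   # {'Host': 'indexes', 'User-Agent': 'Python-urllib/3.7', 'Connection': 'close'}
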
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPConnection object at 0x7ffaa46c0438>, method = 'GET'
url = '/export.fld', body = None
headers = {'Connection': 'close', 'Host': 'indexes', 'User-Agent': 'Python-urllib/3.7'}
def request(self, method, url, body=None, headers={}, *,
encode_chunked=False):
"""Send a complete request to the server."""
> self._send_request(method, url, body, headers, encode_chunked)
/usr/lib/python3.7/http/client.py:1260:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPConnection object at 0x7ffaa46c0438>, method = 'GET'
url = '/export.fld', body = None
headers = {'Connection': 'close', 'Host': 'indexes', 'User-Agent': 'Python-urllib/3.7'}
encode_chunked = False
def _send_request(self, method, url, body, headers, encode_chunked):
# Honor explicitly requested Host: and Accept-Encoding: headers.
header_names = frozenset(k.lower() for k in headers)
skips = {}
if 'host' in header_names:
skips['skip_host'] = 1
if 'accept-encoding' in header_names:
skips['skip_accept_encoding'] = 1
self.putrequest(method, url, **skips)
# chunked encoding will happen if HTTP/1.1 is used and either
# the caller passes encode_chunked=True or the following
# conditions hold:
# 1. content-length has not been explicitly set
# 2. the body is a file or iterable, but not a str or bytes-like
# 3. Transfer-Encoding has NOT been explicitly set by the caller
if 'content-length' not in header_names:
# only chunk body if not explicitly set for backwards
# compatibility, assuming the client code is already handling the
# chunking
if 'transfer-encoding' not in header_names:
# if content-length cannot be automatically determined, fall
# back to chunked encoding
encode_chunked = False
content_length = self._get_content_length(body, method)
if content_length is None:
if body is not None:
if self.debuglevel > 0:
print('Unable to determine size of %r' % body)
encode_chunked = True
self.putheader('Transfer-Encoding', 'chunked')
else:
self.putheader('Content-Length', str(content_length))
else:
encode_chunked = False
for hdr, value in headers.items():
self.putheader(hdr, value)
if isinstance(body, str):
# RFC 2616 Section 3.7.1 says that text default has a
# default charset of iso-8859-1.
body = _encode(body, 'body')
> self.endheaders(body, encode_chunked=encode_chunked)
/usr/lib/python3.7/http/client.py:1306:
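
The comments above describe when http.client falls back to chunked transfer encoding: no explicit Content-Length or Transfer-Encoding header, and a body whose size cannot be determined. A rough illustration of that probe, using the private _get_content_length helper called a few lines above (private API, shown here only to mirror the decision):

import http.client

probe = http.client.HTTPConnection._get_content_length
print(probe(b"payload", "POST"))                    # 7    -> Content-Length: 7 is sent
print(probe(None, "GET"))                           # None -> no body, nothing to send
print(probe((c for c in [b"a", b"b"]), "POST"))     # None with a body -> Transfer-Encoding: chunked
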
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPConnection object at 0x7ffaa46c0438>
message_body = None
def endheaders(self, message_body=None, *, encode_chunked=False):
"""Indicate that the last header line has been sent to the server.
This method sends the request to the server. The optional message_body
argument can be used to pass a message body associated with the
request.
"""
if self.__state == _CS_REQ_STARTED:
self.__state = _CS_REQ_SENT
else:
raise CannotSendHeader()
> self._send_output(message_body, encode_chunked=encode_chunked)
/usr/lib/python3.7/http/client.py:1255:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPConnection object at 0x7ffaa46c0438>
message_body = None, encode_chunked = False
def _send_output(self, message_body=None, encode_chunked=False):
"""Send the currently buffered request and clear the buffer.
Appends an extra \\r\\n to the buffer.
A message_body may be specified, to be appended to the request.
"""
self._buffer.extend((b"", b""))
msg = b"\r\n".join(self._buffer)
del self._buffer[:]
> self.send(msg)
/usr/lib/python3.7/http/client.py:1030:
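
_send_output joins the buffered request line and headers with CRLF and appends the blank line that terminates the header block; the data local in the next frame shows the exact bytes produced for this request. A minimal reproduction of that join:

buffer = [
    b"GET /export.fld HTTP/1.1",
    b"Accept-Encoding: identity",
    b"Host: indexes",
    b"User-Agent: Python-urllib/3.7",
    b"Connection: close",
]
buffer.extend((b"", b""))        # the two empty entries yield the final CRLF CRLF
msg = b"\r\n".join(buffer)
print(msg)                       # matches the `data` value shown in the send() frame below
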
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPConnection object at 0x7ffaa46c0438>
data = b'GET /export.fld HTTP/1.1\r\nAccept-Encoding: identity\r\nHost: indexes\r\nUser-Agent: Python-urllib/3.7\r\nConnection: close\r\n\r\n'
def send(self, data):
"""Send `data' to the server.
``data`` can be a string object, a bytes object, an array object, a
file-like object that supports a .read() method, or an iterable object.
"""
if self.sock is None:
if self.auto_open:
> self.connect()
/usr/lib/python3.7/http/client.py:970:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPConnection object at 0x7ffaa46c0438>
def connect(self):
"""Connect to the host and port specified in __init__."""
self.sock = self._create_connection(
> (self.host,self.port), self.timeout, self.source_address)
/usr/lib/python3.7/http/client.py:942:
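
connect() hands (self.host, self.port) to create_connection; both come from the host string that do_open passed to the HTTPConnection constructor (the "will parse host:port" comment earlier). A quick illustration of that parsing, with no network involved:

import http.client

conn = http.client.HTTPConnection("indexes")        # no port given -> default 80
print(conn.host, conn.port)                          # indexes 80
conn = http.client.HTTPConnection("indexes:8080")    # explicit port is split off
print(conn.host, conn.port)                          # indexes 8080
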
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
address = ('indexes', 80), timeout = <object object at 0x7ffaab57c390>
source_address = None
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
A host of '' or port 0 tells the OS to use the default.
"""
host, port = address
err = None
> for res in getaddrinfo(host, port, 0, SOCK_STREAM):
/usr/lib/python3.7/socket.py:707:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
host = 'indexes', port = 80, family = 0, type = <SocketKind.SOCK_STREAM: 1>
proto = 0, flags = 0
def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
"""Resolve host and port into list of address info entries.
Translate the host/port argument into a sequence of 5-tuples that contain
all the necessary arguments for creating a socket connected to that service.
host is a domain name, a string representation of an IPv4/v6 address or
None. port is a string service name such as 'http', a numeric port number or
None. By passing None as the value of host and port, you can pass NULL to
the underlying C API.
The family, type and proto arguments can be optionally specified in order to
narrow the list of addresses returned. Passing zero as a value for each of
these arguments selects the full range of results.
"""
# We override this function since we want to translate the numeric family
# and socket type values to enum constants.
addrlist = []
> for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
E socket.gaierror: [Errno -2] Name or service not known
/usr/lib/python3.7/socket.py:748: gaierror
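
This is the actual failure: the index URL points at the made-up host "indexes", which only exists inside the mocked requests session, so real name resolution fails. The error is easy to reproduce on its own (assuming no host named "indexes" resolves locally), and since gaierror is a subclass of OSError it is caught by the except OSError branch shown in the traceback below and re-raised as URLError:

import socket

try:
    socket.getaddrinfo("indexes", 80, 0, socket.SOCK_STREAM)
except socket.gaierror as exc:
    print(exc)                        # [Errno -2] Name or service not known
    print(isinstance(exc, OSError))   # True -> urllib wraps it in URLError
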
During handling of the above exception, another exception occurred:
swh_scheduler = <swh.scheduler.backend.SchedulerBackend object at 0x7ffaa8d61b38>
requests_mock = <requests_mock.mocker.Mocker object at 0x7ffaa8d61320>
http_code = 500
@pytest.mark.parametrize("http_code", [400, 500, 502])
def test_maven_list_http_error(swh_scheduler, requests_mock, http_code):
"""Test handling of some HTTP errors commonly encountered"""
lister = MavenLister(
scheduler=swh_scheduler, url=MVN_URL, index_url="http://indexes/export.fld"
)
requests_mock.get(INDEX_URL, text=maven_index)
requests_mock.get(URL_POM_1, status_code=http_code)
with pytest.raises(requests.HTTPError):
> lister.run()
.tox/py3/lib/python3.7/site-packages/swh/lister/maven/tests/test_lister.py:110:
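
For context, the requests_mock fixture only intercepts HTTP calls made through the requests library. A minimal standalone version of the mocked-error expectation the test relies on (hypothetical URL, assuming the pytest requests-mock plugin is installed):

import pytest
import requests

def test_mocked_http_error(requests_mock):
    # hypothetical URL, registered with the same kind of error code the test above uses
    requests_mock.get("http://indexes/some.pom", status_code=500)
    with pytest.raises(requests.HTTPError):
        requests.get("http://indexes/some.pom").raise_for_status()
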
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox/py3/lib/python3.7/site-packages/swh/lister/pattern.py:127: in run
for page in self.get_pages():
.tox/py3/lib/python3.7/site-packages/swh/lister/maven/lister.py:109: in get_pages
with request.urlopen(self.INDEX_URL) as fsrc:
/usr/lib/python3.7/urllib/request.py:222: in urlopen
return opener.open(url, data, timeout)
/usr/lib/python3.7/urllib/request.py:525: in open
response = self._open(req, data)
/usr/lib/python3.7/urllib/request.py:543: in _open
'_open', req)
/usr/lib/python3.7/urllib/request.py:503: in _call_chain
result = func(*args)
/usr/lib/python3.7/urllib/request.py:1352: in http_open
return self.do_open(http.client.HTTPConnection, req)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <urllib.request.HTTPHandler object at 0x7ffaa8dfc4a8>
http_class = <class 'http.client.HTTPConnection'>
req = <urllib.request.Request object at 0x7ffaa46c0f98>, http_conn_args = {}
host = 'indexes', h = <http.client.HTTPConnection object at 0x7ffaa46c0438>
def do_open(self, http_class, req, **http_conn_args):
"""Return an HTTPResponse object for the request, using http_class.
http_class must implement the HTTPConnection API from http.client.
"""
host = req.host
if not host:
raise URLError('no host given')
# will parse host:port
h = http_class(host, timeout=req.timeout, **http_conn_args)
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
headers.update({k: v for k, v in req.headers.items()
if k not in headers})
# TODO(jhylton): Should this be redesigned to handle
# persistent connections?
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = {name.title(): val for name, val in headers.items()}
if req._tunnel_host:
tunnel_headers = {}
proxy_auth_hdr = "Proxy-Authorization"
if proxy_auth_hdr in headers:
tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
# Proxy-Authorization should not be sent to origin
# server.
del headers[proxy_auth_hdr]
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
try:
h.request(req.get_method(), req.selector, req.data, headers,
encode_chunked=req.has_header('Transfer-encoding'))
except OSError as err: # timeout error
> raise URLError(err)
E urllib.error.URLError: <urlopen error [Errno -2] Name or service not known>
/usr/lib/python3.7/urllib/request.py:1326: URLError
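
Root cause, as far as the traceback shows: requests_mock patches only the requests library, but MavenLister.get_pages fetches the index with urllib.request.urlopen (lister.py:109 above), which is not intercepted and therefore tries to resolve the fake host "indexes" for real, turning the expected requests.HTTPError into a URLError. One possible way to keep that fetch off the network in the test, sketched with hypothetical fixture content and not necessarily the project's actual fix:

import io
from unittest.mock import patch
from urllib import request

fake_index = io.BytesIO(b"...")          # stand-in for the maven_index fixture content
with patch.object(request, "urlopen", return_value=fake_index):
    with request.urlopen("http://indexes/export.fld") as fsrc:
        data = fsrc.read()               # served from the in-memory buffer, no DNS lookup

Alternatively, fetching the index through requests would let the existing requests_mock.get(INDEX_URL, ...) registration take effect.
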
TEST RESULT
- Run At: Aug 26 2021, 9:18 PM