Failed

Console Output

Skipping 38 KB..
    
        # Trigger any extra validation we need to do.
        try:
>           self._validate_conn(conn)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:382: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb6afdf70>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb433d370>

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)
    
        # Force connect early to allow us to validate the connection.
        if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
>           conn.connect()

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:1010: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fdbb433d370>

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()
        hostname = self.host
        tls_in_tls = False
    
        if self._is_using_tunnel():
            if self.tls_in_tls_required:
                conn = self._connect_tls_proxy(hostname, conn)
                tls_in_tls = True
    
            self.sock = conn
    
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0
    
            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host
    
        server_hostname = hostname
        if self.server_hostname is not None:
            server_hostname = self.server_hostname
    
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn(
                (
                    "System time is way off (before {0}). This will probably "
                    "lead to SSL verification errors"
                ).format(RECENT_DATE),
                SystemTimeWarning,
            )
    
        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        default_ssl_context = False
        if self.ssl_context is None:
            default_ssl_context = True
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )
    
        context = self.ssl_context
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
    
        # Try to load OS default certs if none are given.
        # Works well on Windows (requires Python3.4+)
        if (
            not self.ca_certs
            and not self.ca_cert_dir
            and not self.ca_cert_data
            and default_ssl_context
            and hasattr(context, "load_default_certs")
        ):
            context.load_default_certs()
    
>       self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            key_password=self.key_password,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=server_hostname,
            ssl_context=context,
            tls_in_tls=tls_in_tls,
        )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connection.py:411: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
keyfile = None, certfile = None, cert_reqs = None
ca_certs = '/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/certifi/cacert.pem'
server_hostname = 'sparc.biolucida.net', ssl_version = None, ciphers = None
ssl_context = <ssl.SSLContext object at 0x7fdbb685b440>, ca_cert_dir = None
key_password = None, ca_cert_data = None, tls_in_tls = False

    def ssl_wrap_socket(
        sock,
        keyfile=None,
        certfile=None,
        cert_reqs=None,
        ca_certs=None,
        server_hostname=None,
        ssl_version=None,
        ciphers=None,
        ssl_context=None,
        ca_cert_dir=None,
        key_password=None,
        ca_cert_data=None,
        tls_in_tls=False,
    ):
        """
        All arguments except for server_hostname, ssl_context, and ca_cert_dir have
        the same meaning as they do when using :func:`ssl.wrap_socket`.
    
        :param server_hostname:
            When SNI is supported, the expected hostname of the certificate
        :param ssl_context:
            A pre-made :class:`SSLContext` object. If none is provided, one will
            be created using :func:`create_urllib3_context`.
        :param ciphers:
            A string of ciphers we wish the client to support.
        :param ca_cert_dir:
            A directory containing CA certificates in multiple separate files, as
            supported by OpenSSL's -CApath flag or the capath argument to
            SSLContext.load_verify_locations().
        :param key_password:
            Optional password if the keyfile is encrypted.
        :param ca_cert_data:
            Optional string containing CA certificates in PEM format suitable for
            passing as the cadata parameter to SSLContext.load_verify_locations()
        :param tls_in_tls:
            Use SSLTransport to wrap the existing socket.
        """
        context = ssl_context
        if context is None:
            # Note: This branch of code and all the variables in it are no longer
            # used by urllib3 itself. We should consider deprecating and removing
            # this code.
            context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
    
        if ca_certs or ca_cert_dir or ca_cert_data:
            try:
                context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
            except (IOError, OSError) as e:
                raise SSLError(e)
    
        elif ssl_context is None and hasattr(context, "load_default_certs"):
            # try to load OS default certs; works well on Windows (require Python3.4+)
            context.load_default_certs()
    
        # Attempt to detect if we get the goofy behavior of the
        # keyfile being encrypted and OpenSSL asking for the
        # passphrase via the terminal and instead error out.
        if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
            raise SSLError("Client private key is encrypted, password is required")
    
        if certfile:
            if key_password is None:
                context.load_cert_chain(certfile, keyfile)
            else:
                context.load_cert_chain(certfile, keyfile, key_password)
    
        try:
            if hasattr(context, "set_alpn_protocols"):
                context.set_alpn_protocols(ALPN_PROTOCOLS)
        except NotImplementedError:
            pass
    
        # If we detect server_hostname is an IP address then the SNI
        # extension should not be used according to RFC3546 Section 3.1
        use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
        # SecureTransport uses server_hostname in certificate verification.
        send_sni = (use_sni_hostname and HAS_SNI) or (
            IS_SECURETRANSPORT and server_hostname
        )
        # Do not warn the user if server_hostname is an invalid SNI hostname.
        if not HAS_SNI and use_sni_hostname:
            warnings.warn(
                "An HTTPS request has been made, but the SNI (Server Name "
                "Indication) extension to TLS is not available on this platform. "
                "This may cause the server to present an incorrect TLS "
                "certificate, which can cause validation failures. You can upgrade to "
                "a newer version of Python to solve this. For more information, see "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "#ssl-warnings",
                SNIMissingWarning,
            )
    
        if send_sni:
>           ssl_sock = _ssl_wrap_socket_impl(
                sock, context, tls_in_tls, server_hostname=server_hostname
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:428: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
ssl_context = <ssl.SSLContext object at 0x7fdbb685b440>, tls_in_tls = False
server_hostname = 'sparc.biolucida.net'

    def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
        if tls_in_tls:
            if not SSLTransport:
                # Import error, ssl is not available.
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires support for the 'ssl' module"
                )
    
            SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
            return SSLTransport(sock, ssl_context, server_hostname)
    
        if server_hostname:
>           return ssl_context.wrap_socket(sock, server_hostname=server_hostname)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:472: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLContext object at 0x7fdbb685b440>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net', session = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None, session=None):
        # SSLSocket class handles server_hostname encoding before it calls
        # ctx._wrap_socket()
>       return self.sslsocket_class._create(
            sock=sock,
            server_side=server_side,
            do_handshake_on_connect=do_handshake_on_connect,
            suppress_ragged_eofs=suppress_ragged_eofs,
            server_hostname=server_hostname,
            context=self,
            session=session
        )

/usr/lib/python3.9/ssl.py:501: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'ssl.SSLSocket'>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net'
context = <ssl.SSLContext object at 0x7fdbb685b440>, session = None

    @classmethod
    def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, server_hostname=None,
                context=None, session=None):
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side:
            if server_hostname:
                raise ValueError("server_hostname can only be specified "
                                 "in client mode")
            if session is not None:
                raise ValueError("session can only be specified in "
                                 "client mode")
        if context.check_hostname and not server_hostname:
            raise ValueError("check_hostname requires server_hostname")
    
        kwargs = dict(
            family=sock.family, type=sock.type, proto=sock.proto,
            fileno=sock.fileno()
        )
        self = cls.__new__(cls, **kwargs)
        super(SSLSocket, self).__init__(**kwargs)
        sock_timeout = sock.gettimeout()
        sock.detach()
    
        self._context = context
        self._session = session
        self._closed = False
        self._sslobj = None
        self.server_side = server_side
        self.server_hostname = context._encode_hostname(server_hostname)
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs
    
        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
            blocking = self.getblocking()
            self.setblocking(False)
            try:
                # We are not connected so this is not supposed to block, but
                # testing revealed otherwise on macOS and Windows so we do
                # the non-blocking dance regardless. Our raise when any data
                # is found means consuming the data is harmless.
                notconn_pre_handshake_data = self.recv(1)
            except OSError as e:
                # EINVAL occurs for recv(1) on non-connected on unix sockets.
                if e.errno not in (errno.ENOTCONN, errno.EINVAL):
                    raise
                notconn_pre_handshake_data = b''
            self.setblocking(blocking)
            if notconn_pre_handshake_data:
                # This prevents pending data sent to the socket before it was
                # closed from escaping to the caller who could otherwise
                # presume it came through a successful TLS connection.
                reason = "Closed before TLS handshake with data in recv buffer."
                notconn_pre_handshake_data_error = SSLError(e.errno, reason)
                # Add the SSLError attributes that _ssl.c always adds.
                notconn_pre_handshake_data_error.reason = reason
                notconn_pre_handshake_data_error.library = None
                try:
                    self.close()
                except OSError:
                    pass
                try:
                    raise notconn_pre_handshake_data_error
                finally:
                    # Explicitly break the reference cycle.
                    notconn_pre_handshake_data_error = None
        else:
            connected = True
    
        self.settimeout(sock_timeout)  # Must come after setblocking() calls.
        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(
                    self, server_side, self.server_hostname,
                    owner=self, session=self._session,
                )
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
>                   self.do_handshake()

/usr/lib/python3.9/ssl.py:1074: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           ConnectionResetError: [Errno 104] Connection reset by peer

/usr/lib/python3.9/ssl.py:1343: ConnectionResetError

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7fdbb6ac3bb0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
>               resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/adapters.py:439: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb6afdf70>
method = 'GET'
url = '/thumbnail?datasetId=37&version=3&path=files%2Fderivative%2Fsub-54-5%2FTJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml'
body = None
headers = {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/thumbnail', query='datasetId=37&version=3&path=files%2Fderivative%2Fsub-54-5%2FTJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Pass method to Response for length checking
            response_kw["request_method"] = method
    
            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(
                httplib_response,
                pool=self,
                connection=response_conn,
                retries=retries,
                **response_kw
            )
    
            # Everything went great!
            clean_exit = True
    
        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise
    
        except (
            TimeoutError,
            HTTPException,
            SocketError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError("Connection aborted.", e)
    
>           retries = retries.increment(
                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:755: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Retry(total=0, connect=None, read=False, redirect=None, status=None)
method = 'GET'
url = '/thumbnail?datasetId=37&version=3&path=files%2Fderivative%2Fsub-54-5%2FTJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml'
response = None
error = ProtocolError('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb6afdf70>
_stacktrace = <traceback object at 0x7fdbbc2864c0>

    def increment(
        self,
        method=None,
        url=None,
        response=None,
        error=None,
        _pool=None,
        _stacktrace=None,
    ):
        """Return a new Retry object with incremented retry counters.
    
        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.
    
        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)
    
        total = self.total
        if total is not None:
            total -= 1
    
        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None
    
        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
    
        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or not self._is_method_retryable(method):
>               raise six.reraise(type(error), error, _stacktrace)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/retry.py:532: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

tp = <class 'urllib3.exceptions.ProtocolError'>, value = None, tb = None

    def reraise(tp, value, tb=None):
        try:
            if value is None:
                value = tp()
            if value.__traceback__ is not tb:
>               raise value.with_traceback(tb)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/packages/six.py:734: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb6afdf70>
method = 'GET'
url = '/thumbnail?datasetId=37&version=3&path=files%2Fderivative%2Fsub-54-5%2FTJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml'
body = None
headers = {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/thumbnail', query='datasetId=37&version=3&path=files%2Fderivative%2Fsub-54-5%2FTJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:699: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb6afdf70>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb433d370>
method = 'GET'
url = '/thumbnail?datasetId=37&version=3&path=files%2Fderivative%2Fsub-54-5%2FTJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}}
timeout_obj = Timeout(connect=None, read=None, total=None)

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
>           self._validate_conn(conn)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:382: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb6afdf70>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb433d370>

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)
    
        # Force connect early to allow us to validate the connection.
        if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
>           conn.connect()

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:1010: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fdbb433d370>

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()
        hostname = self.host
        tls_in_tls = False
    
        if self._is_using_tunnel():
            if self.tls_in_tls_required:
                conn = self._connect_tls_proxy(hostname, conn)
                tls_in_tls = True
    
            self.sock = conn
    
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0
    
            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host
    
        server_hostname = hostname
        if self.server_hostname is not None:
            server_hostname = self.server_hostname
    
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn(
                (
                    "System time is way off (before {0}). This will probably "
                    "lead to SSL verification errors"
                ).format(RECENT_DATE),
                SystemTimeWarning,
            )
    
        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        default_ssl_context = False
        if self.ssl_context is None:
            default_ssl_context = True
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )
    
        context = self.ssl_context
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
    
        # Try to load OS default certs if none are given.
        # Works well on Windows (requires Python3.4+)
        if (
            not self.ca_certs
            and not self.ca_cert_dir
            and not self.ca_cert_data
            and default_ssl_context
            and hasattr(context, "load_default_certs")
        ):
            context.load_default_certs()
    
>       self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            key_password=self.key_password,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=server_hostname,
            ssl_context=context,
            tls_in_tls=tls_in_tls,
        )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connection.py:411: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
keyfile = None, certfile = None, cert_reqs = None
ca_certs = '/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/certifi/cacert.pem'
server_hostname = 'sparc.biolucida.net', ssl_version = None, ciphers = None
ssl_context = <ssl.SSLContext object at 0x7fdbb685b440>, ca_cert_dir = None
key_password = None, ca_cert_data = None, tls_in_tls = False

    def ssl_wrap_socket(
        sock,
        keyfile=None,
        certfile=None,
        cert_reqs=None,
        ca_certs=None,
        server_hostname=None,
        ssl_version=None,
        ciphers=None,
        ssl_context=None,
        ca_cert_dir=None,
        key_password=None,
        ca_cert_data=None,
        tls_in_tls=False,
    ):
        """
        All arguments except for server_hostname, ssl_context, and ca_cert_dir have
        the same meaning as they do when using :func:`ssl.wrap_socket`.
    
        :param server_hostname:
            When SNI is supported, the expected hostname of the certificate
        :param ssl_context:
            A pre-made :class:`SSLContext` object. If none is provided, one will
            be created using :func:`create_urllib3_context`.
        :param ciphers:
            A string of ciphers we wish the client to support.
        :param ca_cert_dir:
            A directory containing CA certificates in multiple separate files, as
            supported by OpenSSL's -CApath flag or the capath argument to
            SSLContext.load_verify_locations().
        :param key_password:
            Optional password if the keyfile is encrypted.
        :param ca_cert_data:
            Optional string containing CA certificates in PEM format suitable for
            passing as the cadata parameter to SSLContext.load_verify_locations()
        :param tls_in_tls:
            Use SSLTransport to wrap the existing socket.
        """
        context = ssl_context
        if context is None:
            # Note: This branch of code and all the variables in it are no longer
            # used by urllib3 itself. We should consider deprecating and removing
            # this code.
            context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
    
        if ca_certs or ca_cert_dir or ca_cert_data:
            try:
                context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
            except (IOError, OSError) as e:
                raise SSLError(e)
    
        elif ssl_context is None and hasattr(context, "load_default_certs"):
            # try to load OS default certs; works well on Windows (require Python3.4+)
            context.load_default_certs()
    
        # Attempt to detect if we get the goofy behavior of the
        # keyfile being encrypted and OpenSSL asking for the
        # passphrase via the terminal and instead error out.
        if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
            raise SSLError("Client private key is encrypted, password is required")
    
        if certfile:
            if key_password is None:
                context.load_cert_chain(certfile, keyfile)
            else:
                context.load_cert_chain(certfile, keyfile, key_password)
    
        try:
            if hasattr(context, "set_alpn_protocols"):
                context.set_alpn_protocols(ALPN_PROTOCOLS)
        except NotImplementedError:
            pass
    
        # If we detect server_hostname is an IP address then the SNI
        # extension should not be used according to RFC3546 Section 3.1
        use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
        # SecureTransport uses server_hostname in certificate verification.
        send_sni = (use_sni_hostname and HAS_SNI) or (
            IS_SECURETRANSPORT and server_hostname
        )
        # Do not warn the user if server_hostname is an invalid SNI hostname.
        if not HAS_SNI and use_sni_hostname:
            warnings.warn(
                "An HTTPS request has been made, but the SNI (Server Name "
                "Indication) extension to TLS is not available on this platform. "
                "This may cause the server to present an incorrect TLS "
                "certificate, which can cause validation failures. You can upgrade to "
                "a newer version of Python to solve this. For more information, see "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "#ssl-warnings",
                SNIMissingWarning,
            )
    
        if send_sni:
>           ssl_sock = _ssl_wrap_socket_impl(
                sock, context, tls_in_tls, server_hostname=server_hostname
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:428: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
ssl_context = <ssl.SSLContext object at 0x7fdbb685b440>, tls_in_tls = False
server_hostname = 'sparc.biolucida.net'

    def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
        if tls_in_tls:
            if not SSLTransport:
                # Import error, ssl is not available.
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires support for the 'ssl' module"
                )
    
            SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
            return SSLTransport(sock, ssl_context, server_hostname)
    
        if server_hostname:
>           return ssl_context.wrap_socket(sock, server_hostname=server_hostname)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:472: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLContext object at 0x7fdbb685b440>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net', session = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None, session=None):
        # SSLSocket class handles server_hostname encoding before it calls
        # ctx._wrap_socket()
>       return self.sslsocket_class._create(
            sock=sock,
            server_side=server_side,
            do_handshake_on_connect=do_handshake_on_connect,
            suppress_ragged_eofs=suppress_ragged_eofs,
            server_hostname=server_hostname,
            context=self,
            session=session
        )

/usr/lib/python3.9/ssl.py:501: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'ssl.SSLSocket'>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net'
context = <ssl.SSLContext object at 0x7fdbb685b440>, session = None

    @classmethod
    def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, server_hostname=None,
                context=None, session=None):
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side:
            if server_hostname:
                raise ValueError("server_hostname can only be specified "
                                 "in client mode")
            if session is not None:
                raise ValueError("session can only be specified in "
                                 "client mode")
        if context.check_hostname and not server_hostname:
            raise ValueError("check_hostname requires server_hostname")
    
        kwargs = dict(
            family=sock.family, type=sock.type, proto=sock.proto,
            fileno=sock.fileno()
        )
        self = cls.__new__(cls, **kwargs)
        super(SSLSocket, self).__init__(**kwargs)
        sock_timeout = sock.gettimeout()
        sock.detach()
    
        self._context = context
        self._session = session
        self._closed = False
        self._sslobj = None
        self.server_side = server_side
        self.server_hostname = context._encode_hostname(server_hostname)
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs
    
        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
            blocking = self.getblocking()
            self.setblocking(False)
            try:
                # We are not connected so this is not supposed to block, but
                # testing revealed otherwise on macOS and Windows so we do
                # the non-blocking dance regardless. Our raise when any data
                # is found means consuming the data is harmless.
                notconn_pre_handshake_data = self.recv(1)
            except OSError as e:
                # EINVAL occurs for recv(1) on non-connected on unix sockets.
                if e.errno not in (errno.ENOTCONN, errno.EINVAL):
                    raise
                notconn_pre_handshake_data = b''
            self.setblocking(blocking)
            if notconn_pre_handshake_data:
                # This prevents pending data sent to the socket before it was
                # closed from escaping to the caller who could otherwise
                # presume it came through a successful TLS connection.
                reason = "Closed before TLS handshake with data in recv buffer."
                notconn_pre_handshake_data_error = SSLError(e.errno, reason)
                # Add the SSLError attributes that _ssl.c always adds.
                notconn_pre_handshake_data_error.reason = reason
                notconn_pre_handshake_data_error.library = None
                try:
                    self.close()
                except OSError:
                    pass
                try:
                    raise notconn_pre_handshake_data_error
                finally:
                    # Explicitly break the reference cycle.
                    notconn_pre_handshake_data_error = None
        else:
            connected = True
    
        self.settimeout(sock_timeout)  # Must come after setblocking() calls.
        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(
                    self, server_side, self.server_hostname,
                    owner=self, session=self._session,
                )
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
>                   self.do_handshake()

/usr/lib/python3.9/ssl.py:1074: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

/usr/lib/python3.9/ssl.py:1343: ProtocolError

During handling of the above exception, another exception occurred:

client = <FlaskClient <Flask 'app.main'>>

    def test_neurolucida_thumbnail(client):
        query_string = {'datasetId': 37, 'version': 3, 'path': 'files/derivative/sub-54-5/TJU_3Scan_ratheart54-5_updated_06_11_19_Fiducials.xml'}
>       r = client.get('/thumbnail/neurolucida', query_string=query_string)

tests/test_thumbnails.py:20: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:1029: in get
    return self.open(*args, **kw)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/testing.py:222: in open
    return Client.open(
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:993: in open
    response = self.run_wsgi_app(environ.copy(), buffered=buffered)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:884: in run_wsgi_app
    rv = run_wsgi_app(self.application, environ, buffered=buffered)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:1119: in run_wsgi_app
    app_rv = app(environ, start_response)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:2463: in __call__
    return self.wsgi_app(environ, start_response)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:2449: in wsgi_app
    response = self.handle_exception(e)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask_cors/extension.py:161: in wrapped_function
    return cors_after_request(app.make_response(f(*args, **kwargs)))
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1866: in handle_exception
    reraise(exc_type, exc_value, tb)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/_compat.py:39: in reraise
    raise value
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:2446: in wsgi_app
    response = self.full_dispatch_request()
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1951: in full_dispatch_request
    rv = self.handle_user_exception(e)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask_cors/extension.py:161: in wrapped_function
    return cors_after_request(app.make_response(f(*args, **kwargs)))
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1820: in handle_user_exception
    reraise(exc_type, exc_value, tb)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/_compat.py:39: in reraise
    raise value
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1949: in full_dispatch_request
    rv = self.dispatch_request()
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1935: in dispatch_request
    return self.view_functions[rule.endpoint](**req.view_args)
app/main.py:306: in thumbnail_from_neurolucida_file
    response = requests.get(url, params=query_args)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/api.py:76: in get
    return request('get', url, params=params, **kwargs)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/api.py:61: in request
    return session.request(method=method, url=url, **kwargs)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/sessions.py:542: in request
    resp = self.send(prep, **send_kwargs)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/sessions.py:655: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7fdbb6ac3bb0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )
    
            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool
    
                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
    
                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)
    
                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)
    
                    low_conn.endheaders()
    
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')
    
                    # Receive the response from the server
                    try:
                        # For Python 2.7, use buffering of HTTP responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 3.3+
                        r = low_conn.getresponse()
    
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
    
        except (ProtocolError, socket.error) as err:
>           raise ConnectionError(err, request=request)
E           requests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/adapters.py:498: ConnectionError
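
Editor's note: this failure and the test_neurolucida_thumbnail_dataset_221 failure that follows share the same root cause. The TLS handshake with sparc.biolucida.net is reset by the peer before any HTTP response arrives, and the adapter's default policy shown above, Retry(total=0, connect=None, read=False), re-raises the first ConnectionResetError instead of retrying. Below is a minimal sketch of one possible mitigation, not the project's actual code: make_biolucida_session is a hypothetical helper, and the retry/timeout values are illustrative. Alternatively, tests/test_thumbnails.py could stub the outbound requests.get call (for example with unittest.mock.patch) so the suite does not depend on the remote service being reachable.

    # Illustrative sketch only; make_biolucida_session is a hypothetical name,
    # not something defined in app/main.py.
    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    def make_biolucida_session():
        # Retry a few times on connection errors and selected 5xx statuses,
        # with exponential backoff, instead of the default Retry(total=0,
        # read=False) that surfaces the first reset immediately.
        retry = Retry(
            total=3,
            connect=3,
            read=2,
            backoff_factor=0.5,
            status_forcelist=(502, 503, 504),
        )
        session = requests.Session()
        session.mount("https://", HTTPAdapter(max_retries=retry))
        return session

    # Hypothetical usage mirroring the call in thumbnail_from_neurolucida_file:
    # session = make_biolucida_session()
    # response = session.get(url, params=query_args, timeout=(5, 30))
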
____________________ test_neurolucida_thumbnail_dataset_221 ____________________

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
method = 'GET'
url = '/thumbnail?datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'
body = None
headers = {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/thumbnail', query='datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:699: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb5b47ee0>
method = 'GET'
url = '/thumbnail?datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}}
timeout_obj = Timeout(connect=None, read=None, total=None)

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
>           self._validate_conn(conn)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:382: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb5b47ee0>

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)
    
        # Force connect early to allow us to validate the connection.
        if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
>           conn.connect()

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:1010: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fdbb5b47ee0>

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()
        hostname = self.host
        tls_in_tls = False
    
        if self._is_using_tunnel():
            if self.tls_in_tls_required:
                conn = self._connect_tls_proxy(hostname, conn)
                tls_in_tls = True
    
            self.sock = conn
    
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0
    
            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host
    
        server_hostname = hostname
        if self.server_hostname is not None:
            server_hostname = self.server_hostname
    
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn(
                (
                    "System time is way off (before {0}). This will probably "
                    "lead to SSL verification errors"
                ).format(RECENT_DATE),
                SystemTimeWarning,
            )
    
        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        default_ssl_context = False
        if self.ssl_context is None:
            default_ssl_context = True
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )
    
        context = self.ssl_context
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
    
        # Try to load OS default certs if none are given.
        # Works well on Windows (requires Python3.4+)
        if (
            not self.ca_certs
            and not self.ca_cert_dir
            and not self.ca_cert_data
            and default_ssl_context
            and hasattr(context, "load_default_certs")
        ):
            context.load_default_certs()
    
>       self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            key_password=self.key_password,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=server_hostname,
            ssl_context=context,
            tls_in_tls=tls_in_tls,
        )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connection.py:411: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
keyfile = None, certfile = None, cert_reqs = None
ca_certs = '/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/certifi/cacert.pem'
server_hostname = 'sparc.biolucida.net', ssl_version = None, ciphers = None
ssl_context = <ssl.SSLContext object at 0x7fdbb4d06a40>, ca_cert_dir = None
key_password = None, ca_cert_data = None, tls_in_tls = False

    def ssl_wrap_socket(
        sock,
        keyfile=None,
        certfile=None,
        cert_reqs=None,
        ca_certs=None,
        server_hostname=None,
        ssl_version=None,
        ciphers=None,
        ssl_context=None,
        ca_cert_dir=None,
        key_password=None,
        ca_cert_data=None,
        tls_in_tls=False,
    ):
        """
        All arguments except for server_hostname, ssl_context, and ca_cert_dir have
        the same meaning as they do when using :func:`ssl.wrap_socket`.
    
        :param server_hostname:
            When SNI is supported, the expected hostname of the certificate
        :param ssl_context:
            A pre-made :class:`SSLContext` object. If none is provided, one will
            be created using :func:`create_urllib3_context`.
        :param ciphers:
            A string of ciphers we wish the client to support.
        :param ca_cert_dir:
            A directory containing CA certificates in multiple separate files, as
            supported by OpenSSL's -CApath flag or the capath argument to
            SSLContext.load_verify_locations().
        :param key_password:
            Optional password if the keyfile is encrypted.
        :param ca_cert_data:
            Optional string containing CA certificates in PEM format suitable for
            passing as the cadata parameter to SSLContext.load_verify_locations()
        :param tls_in_tls:
            Use SSLTransport to wrap the existing socket.
        """
        context = ssl_context
        if context is None:
            # Note: This branch of code and all the variables in it are no longer
            # used by urllib3 itself. We should consider deprecating and removing
            # this code.
            context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
    
        if ca_certs or ca_cert_dir or ca_cert_data:
            try:
                context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
            except (IOError, OSError) as e:
                raise SSLError(e)
    
        elif ssl_context is None and hasattr(context, "load_default_certs"):
            # try to load OS default certs; works well on Windows (require Python3.4+)
            context.load_default_certs()
    
        # Attempt to detect if we get the goofy behavior of the
        # keyfile being encrypted and OpenSSL asking for the
        # passphrase via the terminal and instead error out.
        if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
            raise SSLError("Client private key is encrypted, password is required")
    
        if certfile:
            if key_password is None:
                context.load_cert_chain(certfile, keyfile)
            else:
                context.load_cert_chain(certfile, keyfile, key_password)
    
        try:
            if hasattr(context, "set_alpn_protocols"):
                context.set_alpn_protocols(ALPN_PROTOCOLS)
        except NotImplementedError:
            pass
    
        # If we detect server_hostname is an IP address then the SNI
        # extension should not be used according to RFC3546 Section 3.1
        use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
        # SecureTransport uses server_hostname in certificate verification.
        send_sni = (use_sni_hostname and HAS_SNI) or (
            IS_SECURETRANSPORT and server_hostname
        )
        # Do not warn the user if server_hostname is an invalid SNI hostname.
        if not HAS_SNI and use_sni_hostname:
            warnings.warn(
                "An HTTPS request has been made, but the SNI (Server Name "
                "Indication) extension to TLS is not available on this platform. "
                "This may cause the server to present an incorrect TLS "
                "certificate, which can cause validation failures. You can upgrade to "
                "a newer version of Python to solve this. For more information, see "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "#ssl-warnings",
                SNIMissingWarning,
            )
    
        if send_sni:
>           ssl_sock = _ssl_wrap_socket_impl(
                sock, context, tls_in_tls, server_hostname=server_hostname
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:428: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
ssl_context = <ssl.SSLContext object at 0x7fdbb4d06a40>, tls_in_tls = False
server_hostname = 'sparc.biolucida.net'

    def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
        if tls_in_tls:
            if not SSLTransport:
                # Import error, ssl is not available.
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires support for the 'ssl' module"
                )
    
            SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
            return SSLTransport(sock, ssl_context, server_hostname)
    
        if server_hostname:
>           return ssl_context.wrap_socket(sock, server_hostname=server_hostname)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:472: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLContext object at 0x7fdbb4d06a40>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net', session = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None, session=None):
        # SSLSocket class handles server_hostname encoding before it calls
        # ctx._wrap_socket()
>       return self.sslsocket_class._create(
            sock=sock,
            server_side=server_side,
            do_handshake_on_connect=do_handshake_on_connect,
            suppress_ragged_eofs=suppress_ragged_eofs,
            server_hostname=server_hostname,
            context=self,
            session=session
        )

/usr/lib/python3.9/ssl.py:501: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'ssl.SSLSocket'>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net'
context = <ssl.SSLContext object at 0x7fdbb4d06a40>, session = None

    @classmethod
    def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, server_hostname=None,
                context=None, session=None):
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side:
            if server_hostname:
                raise ValueError("server_hostname can only be specified "
                                 "in client mode")
            if session is not None:
                raise ValueError("session can only be specified in "
                                 "client mode")
        if context.check_hostname and not server_hostname:
            raise ValueError("check_hostname requires server_hostname")
    
        kwargs = dict(
            family=sock.family, type=sock.type, proto=sock.proto,
            fileno=sock.fileno()
        )
        self = cls.__new__(cls, **kwargs)
        super(SSLSocket, self).__init__(**kwargs)
        sock_timeout = sock.gettimeout()
        sock.detach()
    
        self._context = context
        self._session = session
        self._closed = False
        self._sslobj = None
        self.server_side = server_side
        self.server_hostname = context._encode_hostname(server_hostname)
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs
    
        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
            blocking = self.getblocking()
            self.setblocking(False)
            try:
                # We are not connected so this is not supposed to block, but
                # testing revealed otherwise on macOS and Windows so we do
                # the non-blocking dance regardless. Our raise when any data
                # is found means consuming the data is harmless.
                notconn_pre_handshake_data = self.recv(1)
            except OSError as e:
                # EINVAL occurs for recv(1) on non-connected on unix sockets.
                if e.errno not in (errno.ENOTCONN, errno.EINVAL):
                    raise
                notconn_pre_handshake_data = b''
            self.setblocking(blocking)
            if notconn_pre_handshake_data:
                # This prevents pending data sent to the socket before it was
                # closed from escaping to the caller who could otherwise
                # presume it came through a successful TLS connection.
                reason = "Closed before TLS handshake with data in recv buffer."
                notconn_pre_handshake_data_error = SSLError(e.errno, reason)
                # Add the SSLError attributes that _ssl.c always adds.
                notconn_pre_handshake_data_error.reason = reason
                notconn_pre_handshake_data_error.library = None
                try:
                    self.close()
                except OSError:
                    pass
                try:
                    raise notconn_pre_handshake_data_error
                finally:
                    # Explicitly break the reference cycle.
                    notconn_pre_handshake_data_error = None
        else:
            connected = True
    
        self.settimeout(sock_timeout)  # Must come after setblocking() calls.
        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(
                    self, server_side, self.server_hostname,
                    owner=self, session=self._session,
                )
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
>                   self.do_handshake()

/usr/lib/python3.9/ssl.py:1074: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           ConnectionResetError: [Errno 104] Connection reset by peer

/usr/lib/python3.9/ssl.py:1343: ConnectionResetError

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7fdbb48524c0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
>               resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/adapters.py:439: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
method = 'GET'
url = '/thumbnail?datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'
body = None
headers = {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/thumbnail', query='datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Pass method to Response for length checking
            response_kw["request_method"] = method
    
            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(
                httplib_response,
                pool=self,
                connection=response_conn,
                retries=retries,
                **response_kw
            )
    
            # Everything went great!
            clean_exit = True
    
        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise
    
        except (
            TimeoutError,
            HTTPException,
            SocketError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError("Connection aborted.", e)
    
>           retries = retries.increment(
                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:755: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Retry(total=0, connect=None, read=False, redirect=None, status=None)
method = 'GET'
url = '/thumbnail?datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'
response = None
error = ProtocolError('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
_stacktrace = <traceback object at 0x7fdbb55ca500>

    def increment(
        self,
        method=None,
        url=None,
        response=None,
        error=None,
        _pool=None,
        _stacktrace=None,
    ):
        """Return a new Retry object with incremented retry counters.
    
        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.
    
        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)
    
        total = self.total
        if total is not None:
            total -= 1
    
        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None
    
        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
    
        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or not self._is_method_retryable(method):
>               raise six.reraise(type(error), error, _stacktrace)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/retry.py:532: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

tp = <class 'urllib3.exceptions.ProtocolError'>, value = None, tb = None

    def reraise(tp, value, tb=None):
        try:
            if value is None:
                value = tp()
            if value.__traceback__ is not tb:
>               raise value.with_traceback(tb)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/packages/six.py:734: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
method = 'GET'
url = '/thumbnail?datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'
body = None
headers = {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/thumbnail', query='datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
    
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
    
        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
    
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
>           httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:699: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb5b47ee0>
method = 'GET'
url = '/thumbnail?datasetId=221&version=3&path=files%2Fderivative%2Fsub-M168%2Fdigital-traces%2FpCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'
timeout = Timeout(connect=None, read=None, total=None), chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.25.1', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}}
timeout_obj = Timeout(connect=None, read=None, total=None)

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
    
        :param conn:
            a connection from one of our connection pools
    
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
>           self._validate_conn(conn)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:382: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fdbb4852100>
conn = <urllib3.connection.HTTPSConnection object at 0x7fdbb5b47ee0>

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)
    
        # Force connect early to allow us to validate the connection.
        if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
>           conn.connect()

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connectionpool.py:1010: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.HTTPSConnection object at 0x7fdbb5b47ee0>

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()
        hostname = self.host
        tls_in_tls = False
    
        if self._is_using_tunnel():
            if self.tls_in_tls_required:
                conn = self._connect_tls_proxy(hostname, conn)
                tls_in_tls = True
    
            self.sock = conn
    
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0
    
            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host
    
        server_hostname = hostname
        if self.server_hostname is not None:
            server_hostname = self.server_hostname
    
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn(
                (
                    "System time is way off (before {0}). This will probably "
                    "lead to SSL verification errors"
                ).format(RECENT_DATE),
                SystemTimeWarning,
            )
    
        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        default_ssl_context = False
        if self.ssl_context is None:
            default_ssl_context = True
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )
    
        context = self.ssl_context
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
    
        # Try to load OS default certs if none are given.
        # Works well on Windows (requires Python3.4+)
        if (
            not self.ca_certs
            and not self.ca_cert_dir
            and not self.ca_cert_data
            and default_ssl_context
            and hasattr(context, "load_default_certs")
        ):
            context.load_default_certs()
    
>       self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            key_password=self.key_password,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=server_hostname,
            ssl_context=context,
            tls_in_tls=tls_in_tls,
        )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/connection.py:411: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
keyfile = None, certfile = None, cert_reqs = None
ca_certs = '/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/certifi/cacert.pem'
server_hostname = 'sparc.biolucida.net', ssl_version = None, ciphers = None
ssl_context = <ssl.SSLContext object at 0x7fdbb4d06a40>, ca_cert_dir = None
key_password = None, ca_cert_data = None, tls_in_tls = False

    def ssl_wrap_socket(
        sock,
        keyfile=None,
        certfile=None,
        cert_reqs=None,
        ca_certs=None,
        server_hostname=None,
        ssl_version=None,
        ciphers=None,
        ssl_context=None,
        ca_cert_dir=None,
        key_password=None,
        ca_cert_data=None,
        tls_in_tls=False,
    ):
        """
        All arguments except for server_hostname, ssl_context, and ca_cert_dir have
        the same meaning as they do when using :func:`ssl.wrap_socket`.
    
        :param server_hostname:
            When SNI is supported, the expected hostname of the certificate
        :param ssl_context:
            A pre-made :class:`SSLContext` object. If none is provided, one will
            be created using :func:`create_urllib3_context`.
        :param ciphers:
            A string of ciphers we wish the client to support.
        :param ca_cert_dir:
            A directory containing CA certificates in multiple separate files, as
            supported by OpenSSL's -CApath flag or the capath argument to
            SSLContext.load_verify_locations().
        :param key_password:
            Optional password if the keyfile is encrypted.
        :param ca_cert_data:
            Optional string containing CA certificates in PEM format suitable for
            passing as the cadata parameter to SSLContext.load_verify_locations()
        :param tls_in_tls:
            Use SSLTransport to wrap the existing socket.
        """
        context = ssl_context
        if context is None:
            # Note: This branch of code and all the variables in it are no longer
            # used by urllib3 itself. We should consider deprecating and removing
            # this code.
            context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
    
        if ca_certs or ca_cert_dir or ca_cert_data:
            try:
                context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
            except (IOError, OSError) as e:
                raise SSLError(e)
    
        elif ssl_context is None and hasattr(context, "load_default_certs"):
            # try to load OS default certs; works well on Windows (require Python3.4+)
            context.load_default_certs()
    
        # Attempt to detect if we get the goofy behavior of the
        # keyfile being encrypted and OpenSSL asking for the
        # passphrase via the terminal and instead error out.
        if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
            raise SSLError("Client private key is encrypted, password is required")
    
        if certfile:
            if key_password is None:
                context.load_cert_chain(certfile, keyfile)
            else:
                context.load_cert_chain(certfile, keyfile, key_password)
    
        try:
            if hasattr(context, "set_alpn_protocols"):
                context.set_alpn_protocols(ALPN_PROTOCOLS)
        except NotImplementedError:
            pass
    
        # If we detect server_hostname is an IP address then the SNI
        # extension should not be used according to RFC3546 Section 3.1
        use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
        # SecureTransport uses server_hostname in certificate verification.
        send_sni = (use_sni_hostname and HAS_SNI) or (
            IS_SECURETRANSPORT and server_hostname
        )
        # Do not warn the user if server_hostname is an invalid SNI hostname.
        if not HAS_SNI and use_sni_hostname:
            warnings.warn(
                "An HTTPS request has been made, but the SNI (Server Name "
                "Indication) extension to TLS is not available on this platform. "
                "This may cause the server to present an incorrect TLS "
                "certificate, which can cause validation failures. You can upgrade to "
                "a newer version of Python to solve this. For more information, see "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "#ssl-warnings",
                SNIMissingWarning,
            )
    
        if send_sni:
>           ssl_sock = _ssl_wrap_socket_impl(
                sock, context, tls_in_tls, server_hostname=server_hostname
            )

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:428: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
ssl_context = <ssl.SSLContext object at 0x7fdbb4d06a40>, tls_in_tls = False
server_hostname = 'sparc.biolucida.net'

    def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
        if tls_in_tls:
            if not SSLTransport:
                # Import error, ssl is not available.
                raise ProxySchemeUnsupported(
                    "TLS in TLS requires support for the 'ssl' module"
                )
    
            SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
            return SSLTransport(sock, ssl_context, server_hostname)
    
        if server_hostname:
>           return ssl_context.wrap_socket(sock, server_hostname=server_hostname)

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/urllib3/util/ssl_.py:472: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLContext object at 0x7fdbb4d06a40>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net', session = None

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None, session=None):
        # SSLSocket class handles server_hostname encoding before it calls
        # ctx._wrap_socket()
>       return self.sslsocket_class._create(
            sock=sock,
            server_side=server_side,
            do_handshake_on_connect=do_handshake_on_connect,
            suppress_ragged_eofs=suppress_ragged_eofs,
            server_hostname=server_hostname,
            context=self,
            session=session
        )

/usr/lib/python3.9/ssl.py:501: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'ssl.SSLSocket'>
sock = <socket.socket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
server_side = False, do_handshake_on_connect = True, suppress_ragged_eofs = True
server_hostname = 'sparc.biolucida.net'
context = <ssl.SSLContext object at 0x7fdbb4d06a40>, session = None

    @classmethod
    def _create(cls, sock, server_side=False, do_handshake_on_connect=True,
                suppress_ragged_eofs=True, server_hostname=None,
                context=None, session=None):
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side:
            if server_hostname:
                raise ValueError("server_hostname can only be specified "
                                 "in client mode")
            if session is not None:
                raise ValueError("session can only be specified in "
                                 "client mode")
        if context.check_hostname and not server_hostname:
            raise ValueError("check_hostname requires server_hostname")
    
        kwargs = dict(
            family=sock.family, type=sock.type, proto=sock.proto,
            fileno=sock.fileno()
        )
        self = cls.__new__(cls, **kwargs)
        super(SSLSocket, self).__init__(**kwargs)
        sock_timeout = sock.gettimeout()
        sock.detach()
    
        self._context = context
        self._session = session
        self._closed = False
        self._sslobj = None
        self.server_side = server_side
        self.server_hostname = context._encode_hostname(server_hostname)
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs
    
        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
            blocking = self.getblocking()
            self.setblocking(False)
            try:
                # We are not connected so this is not supposed to block, but
                # testing revealed otherwise on macOS and Windows so we do
                # the non-blocking dance regardless. Our raise when any data
                # is found means consuming the data is harmless.
                notconn_pre_handshake_data = self.recv(1)
            except OSError as e:
                # EINVAL occurs for recv(1) on non-connected on unix sockets.
                if e.errno not in (errno.ENOTCONN, errno.EINVAL):
                    raise
                notconn_pre_handshake_data = b''
            self.setblocking(blocking)
            if notconn_pre_handshake_data:
                # This prevents pending data sent to the socket before it was
                # closed from escaping to the caller who could otherwise
                # presume it came through a successful TLS connection.
                reason = "Closed before TLS handshake with data in recv buffer."
                notconn_pre_handshake_data_error = SSLError(e.errno, reason)
                # Add the SSLError attributes that _ssl.c always adds.
                notconn_pre_handshake_data_error.reason = reason
                notconn_pre_handshake_data_error.library = None
                try:
                    self.close()
                except OSError:
                    pass
                try:
                    raise notconn_pre_handshake_data_error
                finally:
                    # Explicitly break the reference cycle.
                    notconn_pre_handshake_data_error = None
        else:
            connected = True
    
        self.settimeout(sock_timeout)  # Must come after setblocking() calls.
        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(
                    self, server_side, self.server_hostname,
                    owner=self, session=self._session,
                )
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
>                   self.do_handshake()

/usr/lib/python3.9/ssl.py:1074: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

/usr/lib/python3.9/ssl.py:1343: ProtocolError

During handling of the above exception, another exception occurred:

client = <FlaskClient <Flask 'app.main'>>

    def test_neurolucida_thumbnail_dataset_221(client):
        query_string = {'datasetId': 221, 'version': 3, 'path': 'files/derivative/sub-M168/digital-traces/pCm168_AAV_Z_20x_191211_S3B_lx_IGS.xml'}
>       r = client.get('/thumbnail/neurolucida', query_string=query_string)

tests/test_thumbnails.py:27: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:1029: in get
    return self.open(*args, **kw)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/testing.py:222: in open
    return Client.open(
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:993: in open
    response = self.run_wsgi_app(environ.copy(), buffered=buffered)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:884: in run_wsgi_app
    rv = run_wsgi_app(self.application, environ, buffered=buffered)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/werkzeug/test.py:1119: in run_wsgi_app
    app_rv = app(environ, start_response)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:2463: in __call__
    return self.wsgi_app(environ, start_response)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:2449: in wsgi_app
    response = self.handle_exception(e)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask_cors/extension.py:161: in wrapped_function
    return cors_after_request(app.make_response(f(*args, **kwargs)))
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1866: in handle_exception
    reraise(exc_type, exc_value, tb)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/_compat.py:39: in reraise
    raise value
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:2446: in wsgi_app
    response = self.full_dispatch_request()
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1951: in full_dispatch_request
    rv = self.handle_user_exception(e)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask_cors/extension.py:161: in wrapped_function
    return cors_after_request(app.make_response(f(*args, **kwargs)))
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1820: in handle_user_exception
    reraise(exc_type, exc_value, tb)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/_compat.py:39: in reraise
    raise value
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1949: in full_dispatch_request
    rv = self.dispatch_request()
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask/app.py:1935: in dispatch_request
    return self.view_functions[rule.endpoint](**req.view_args)
app/main.py:306: in thumbnail_from_neurolucida_file
    response = requests.get(url, params=query_args)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/api.py:76: in get
    return request('get', url, params=params, **kwargs)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/api.py:61: in request
    return session.request(method=method, url=url, **kwargs)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/sessions.py:542: in request
    resp = self.send(prep, **send_kwargs)
../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/sessions.py:655: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7fdbb48524c0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )
    
            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool
    
                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
    
                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)
    
                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)
    
                    low_conn.endheaders()
    
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')
    
                    # Receive the response from the server
                    try:
                        # For Python 2.7, use buffering of HTTP responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 3.3+
                        r = low_conn.getresponse()
    
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
    
        except (ProtocolError, socket.error) as err:
>           raise ConnectionError(err, request=request)
E           requests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

../../shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/requests/adapters.py:498: ConnectionError
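Note on the traceback above: the outbound GET issued from app/main.py:306 (thumbnail_from_neurolucida_file) to sparc.biolucida.net is reset by the peer during the TLS handshake, which urllib3 surfaces as ProtocolError and requests re-raises as ConnectionError, failing the Flask test client request. One possible mitigation, sketched below purely as an assumption (the helper make_retrying_session, its retry counts, and the timeout values are illustrative and not code from this repository), is to route the call through a session that retries transient connection errors and sets an explicit timeout:

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    def make_retrying_session(total=3, backoff_factor=0.5):
        # Build a requests.Session that retries GETs on connection/read errors
        # and on 502/503/504 responses, with exponential backoff between tries.
        retry = Retry(
            total=total,
            connect=total,
            read=total,
            backoff_factor=backoff_factor,
            status_forcelist=(502, 503, 504),
            allowed_methods=frozenset({"GET"}),
        )
        adapter = HTTPAdapter(max_retries=retry)
        session = requests.Session()
        session.mount("https://", adapter)
        session.mount("http://", adapter)
        return session

    # Illustrative usage in place of the bare requests.get(url, params=query_args):
    #     session = make_retrying_session()
    #     response = session.get(url, params=query_args, timeout=(5, 30))

With the Retry configured for GET on connection and read errors, a single "Connection reset by peer" during the handshake would be retried with backoff instead of propagating straight to the view and the test.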
=============================== warnings summary ===============================
/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/apscheduler/__init__.py:1
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/apscheduler/__init__.py:1: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html
    from pkg_resources import get_distribution, DistributionNotFound

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2871
/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2871
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2871: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.
  Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
    declare_namespace(pkg)

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2871
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2871: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google.logging')`.
  Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
    declare_namespace(pkg)

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2350
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pkg_resources/__init__.py:2350: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.
  Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
    declare_namespace(parent)

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/marshmallow/__init__.py:17
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/marshmallow/__init__.py:17: DeprecationWarning: distutils Version classes are deprecated. Use packaging.version instead.
    __version_info__ = tuple(LooseVersion(__version__).version)

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask_marshmallow/__init__.py:34
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/flask_marshmallow/__init__.py:34: DeprecationWarning: distutils Version classes are deprecated. Use packaging.version instead.
    __version_info__ = tuple(LooseVersion(__version__).version)

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/future/standard_library/__init__.py:65
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/future/standard_library/__init__.py:65: DeprecationWarning: the imp module is deprecated in favour of importlib; see the module's documentation for alternative uses
    import imp

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:19
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:19: DeprecationWarning: Call to deprecated create function FileDescriptor(). Note: Create unlinked descriptors is going to go away. Please use get/find descriptors from generated code or query the descriptor_pool.
    DESCRIPTOR = _descriptor.FileDescriptor(

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:36
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:36: DeprecationWarning: Call to deprecated create function FieldDescriptor(). Note: Create unlinked descriptors is going to go away. Please use get/find descriptors from generated code or query the descriptor_pool.
    _descriptor.FieldDescriptor(

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:53
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:53: DeprecationWarning: Call to deprecated create function FieldDescriptor(). Note: Create unlinked descriptors is going to go away. Please use get/find descriptors from generated code or query the descriptor_pool.
    _descriptor.FieldDescriptor(

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:70
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:70: DeprecationWarning: Call to deprecated create function FieldDescriptor(). Note: Create unlinked descriptors is going to go away. Please use get/find descriptors from generated code or query the descriptor_pool.
    _descriptor.FieldDescriptor(

/home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:29
  /home/cmiss/Jenkins/shiningpanda/jobs/c6a9534b/virtualenvs/d41d8cd9/lib/python3.9/site-packages/pennsieve/cache/cache_segment_pb2.py:29: DeprecationWarning: Call to deprecated create function Descriptor(). Note: Create unlinked descriptors is going to go away. Please use get/find descriptors from generated code or query the descriptor_pool.
    _CACHESEGMENT = _descriptor.Descriptor(

tests/test_health.py::test_request_response
  /home/cmiss/Jenkins/workspace/SPARC-API/tests/test_health.py:13: DeprecationWarning: Please use assertEqual instead.
    assert_equals("healthy", json_response.get("status"))

-- Docs: https://docs.pytest.org/en/latest/warnings.html
=========================== short test summary info ============================
FAILED tests/test_thumbnails.py::test_neurolucida_thumbnail - requests.except...
FAILED tests/test_thumbnails.py::test_neurolucida_thumbnail_dataset_221 - req...
======= 2 failed, 95 passed, 3 skipped, 14 warnings in 229.33s (0:03:49) =======
Build step 'Virtualenv Builder' marked build as failure
[Slack Notifications] found #842 as previous completed, non-aborted build
[Slack Notifications] will send OnEveryFailureNotification because build matches and user preferences allow it
Finished: FAILURE
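Both failed tests exercise thumbnail retrieval against the external sparc.biolucida.net service and fail with the same connection-reset error, so the failure appears environmental rather than a regression in the application code. A hypothetical sketch for separating upstream outages from real regressions (the helper biolucida_reachable and the marker requires_biolucida are illustrative, not existing project code):

    import pytest
    import requests

    def biolucida_reachable(host="sparc.biolucida.net", timeout=5):
        # Probe the upstream image server; treat any connection-level error
        # as "unreachable" so dependent tests are skipped rather than failed.
        try:
            requests.head("https://{}".format(host), timeout=timeout)
            return True
        except requests.exceptions.RequestException:
            return False

    requires_biolucida = pytest.mark.skipif(
        not biolucida_reachable(), reason="sparc.biolucida.net unreachable from CI"
    )

    # Illustrative usage on the affected tests in tests/test_thumbnails.py:
    #     @requires_biolucida
    #     def test_neurolucida_thumbnail_dataset_221(client):
    #         ...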