Python urllib3.exceptions module: ReadTimeoutError() example source code

The following code examples, extracted from open-source Python projects, illustrate how to use urllib3.exceptions.ReadTimeoutError().
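Before the project examples, here is a minimal sketch of the situation that raises ReadTimeoutError: the server accepts the connection but does not finish responding within the configured read timeout. The endpoint and timeout values below are illustrative only; retries=False is used because, under the default retry policy, the timeout may instead surface wrapped in a MaxRetryError.

import urllib3
from urllib3.exceptions import ReadTimeoutError

http = urllib3.PoolManager()
try:
    # retries=False lets ReadTimeoutError propagate instead of being
    # wrapped in MaxRetryError once the retry budget is exhausted
    response = http.request('GET',
                            'http://httpbin.org/delay/10',  # illustrative slow endpoint
                            timeout=urllib3.Timeout(connect=2.0, read=1.0),
                            retries=False)
except ReadTimeoutError as error:
    print('read timed out: %s' % error)

The snippets below catch the same exception in different contexts: a unit test (felix), a retry loop around an Elasticsearch query (assetsweeper), custom Elasticsearch transports (PDF_text_extract, elasticsplunk), container-log streaming (seaworthy), and an HTTP scanner (OpenDoor).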

Project: felix    Author: axbaretto
def test_wait_for_etcd_event_conn_failed(self, m_sleep):
        self.watcher.next_etcd_index = 1
        m_resp = Mock()
        m_resp.modifiedIndex = 123
        read_timeout = etcd.EtcdConnectionFailed()
        read_timeout.cause = ReadTimeoutError(Mock(), "", "")
        other_error = etcd.EtcdConnectionFailed()
        other_error.cause = ExpectedException()
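        # First read raises a timeout-caused connection failure, the second an
        # unexpected error, and the third returns a real etcd response.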
        responses = [
            read_timeout,
            other_error,
            m_resp,
        ]
        self.m_client.read.side_effect = iter(responses)
        event = self.watcher.wait_for_etcd_event()
        self.assertEqual(event, m_resp)
        self.assertEqual(m_sleep.mock_calls, [call(1)])
Project: assetsweeper    Author: guardian
def lookup_portal_item(esclient, index_name, item_id):
    """
    Returns an array of the collections that this item belongs to, according to Portal's ES index.
    :param esclient: Elastic search client object to use
    :param index_name: Name of the index to search within
    :param item_id: item ID to look for
    :return: array of collection names that this item belongs to, or None if it has no collection field. Raises PortalItemNotFound if the item is not in the index.
    """
    parts = id_xplodr.match(item_id)
    query = {
        'query': {
            'filtered': {
                'filter': {
                    'term': {
                        'vidispine_id_str_ex': item_id
                    }
                }
            }
        }
    }

    wait_time = 2
    while True:
        try:
            result = esclient.search(index=index_name, doc_type='item', body=query)
            break
        except (ReadTimeoutError, ConnectionTimeout) as e:
            logger.warning(str(e))
            sleep(wait_time)
            wait_time *= 2
    hits = result['hits']['hits']
    if len(hits) == 0:
        raise PortalItemNotFound(item_id)

    # pprint(hits[0]['_source'])
    if 'f___collection_str' not in hits[0]['_source']:
        return None
    return hits[0]['_source']['f___collection_str']  # this is an array
Project: PDF_text_extract    Author: theemadnes
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
        url = self.url_prefix + url
        if params:
            url = '%s?%s' % (url, urlencode(params))
        full_url = self.host + url

        start = time.time()
        try:
            kw = {}
            if timeout:
                kw['timeout'] = timeout

            # in python2 we need to make sure the url and method are not
            # unicode. Otherwise the body will be decoded into unicode too and
            # that will fail (#133, #201).
            if not isinstance(url, str):
                url = url.encode('utf-8')
            if not isinstance(method, str):
                method = method.encode('utf-8')

            response = self.pool.urlopen(method, url, body, retries=False, headers=self.headers, **kw)
            duration = time.time() - start
            raw_data = response.data.decode('utf-8')
        except UrllibSSLError as e:
            self.log_request_fail(method, full_url, body, time.time() - start, exception=e)
            raise SSLError('N/A', str(e), e)
        except ReadTimeoutError as e:
            self.log_request_fail(method, full_url, body, time.time() - start, exception=e)
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        except Exception as e:
            self.log_request_fail(method, full_url, body, time.time() - start, exception=e)
            raise ConnectionError('N/A', str(e), e)

        if not (200 <= response.status < 300) and response.status not in ignore:
            self.log_request_fail(method, url, body, duration, response.status)
            self._raise_error(response.status, raw_data)

        self.log_request_success(method, full_url, url, body, response.status,
            raw_data, duration)

        return response.status, response.getheaders(), raw_data
Project: seaworthy    Author: praekeltfoundation
def read_by_deadline(resp, sock, deadline, n):
    """
    Read up to N bytes from a socket. If there's nothing to read, signal that
    it's closed.
    """
    time_left = deadline - get_time()
    # Avoid past-deadline special cases by setting a small timeout instead.
    sock.settimeout(max(time_left, 0.001))
    try:
        data = resp.raw.read(n)
    except (ReadTimeoutError, socket.timeout) as e:
        raise TimeoutError('Timeout waiting for container logs.')
    if len(data) == 0:
        raise SocketClosed()
    return data
Project: OpenDoor    Author: stanislav-web
def request(self, url):
        """
        Perform an HTTP client request
        :param str url: request URI
        :return: urllib3.HTTPResponse
        """

        if self._HTTP_DBG_LEVEL <= self.__debug.level:
            self.__debug.debug_request(self._headers, url, self.__cfg.method)
        try:
            if self.__cfg.DEFAULT_SCAN == self.__cfg.scan:
                response = self.__pool.request(self.__cfg.method,
                                               helper.parse_url(url).path,
                                               headers=self._headers,
                                               retries=self.__cfg.retries,
                                               assert_same_host=True,
                                               redirect=False)

                self.cookies_middleware(is_accept=self.__cfg.accept_cookies, response=response)
            else:
                response = PoolManager().request(self.__cfg.method, url,
                                                 headers=self._headers,
                                                 retries=self.__cfg.retries,
                                                 assert_same_host=False,
                                                 redirect=False)
            return response

        except MaxRetryError:
            if self.__cfg.DEFAULT_SCAN == self.__cfg.scan:
                self.__tpl.warning(key='max_retry_error', url=helper.parse_url(url).path)
            pass

        except HostChangedError as error:
            self.__tpl.warning(key='host_changed_error', details=error)
            pass

        except ReadTimeoutError:
            self.__tpl.warning(key='read_timeout_error', url=url)
            pass

        except ConnectTimeoutError:
            self.__tpl.warning(key='connection_timeout_error', url=url)
            pass
Project: elasticsplunk    Author: brunotm
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None):
        url = self.url_prefix + url
        if params:
            url = '%s?%s' % (url, urlencode(params))
        full_url = self.host + url

        start = time.time()
        try:
            kw = {}
            if timeout:
                kw['timeout'] = timeout

            # in python2 we need to make sure the url and method are not
            # unicode. Otherwise the body will be decoded into unicode too and
            # that will fail (#133, #201).
            if not isinstance(url, str):
                url = url.encode('utf-8')
            if not isinstance(method, str):
                method = method.encode('utf-8')

            # merge per-request headers into the connection defaults
            request_headers = dict(self.headers)
            if headers:
                request_headers.update(headers)
            response = self.pool.urlopen(method, url, body, retries=False, headers=request_headers, **kw)
            duration = time.time() - start
            raw_data = response.data.decode('utf-8')
        except Exception as e:
            self.log_request_fail(method, full_url, url, body, time.time() - start, exception=e)
            if isinstance(e, UrllibSSLError):
                raise SSLError('N/A', str(e), e)
            if isinstance(e, ReadTimeoutError):
                raise ConnectionTimeout('TIMEOUT', str(e), e)
            raise ConnectionError('N/A', str(e), e)

        # raise errors based on http status codes, let the client handle those if needed
        if not (200 <= response.status < 300) and response.status not in ignore:
            self.log_request_fail(method, full_url, url, body, duration, response.status, raw_data)
            self._raise_error(response.status, raw_data)

        self.log_request_success(method, full_url, url, body, response.status,
            raw_data, duration)

        return response.status, response.getheaders(), raw_data