我们从Python开源项目中,提取了以下27个代码示例,用于说明如何使用pycurl.HTTP_CODE。
def put(url, data, headers=None):
    """Make a PUT request to *url* with *data* as the message body.

    :param url: target URL.
    :param data: request body; its length is sent as INFILESIZE.
    :param headers: optional dict of extra HTTP headers.
    :return: the HTTP status code, or -1 on any transport error.
    """
    # BUGFIX: headers={} was a mutable default argument; use None sentinel.
    if headers is None:
        headers = {}
    reply = -1  # default: non-http response
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    if len(headers) > 0:
        curl.setopt(pycurl.HTTPHEADER,
                    [k + ': ' + v for k, v in headers.items()])
    curl.setopt(pycurl.PUT, 1)
    curl.setopt(pycurl.INFILESIZE, len(data))
    databuffer = StringIO(data)
    curl.setopt(pycurl.READFUNCTION, databuffer.read)
    try:
        curl.perform()
        reply = curl.getinfo(pycurl.HTTP_CODE)
    except Exception:
        # deliberate best-effort: leave reply == -1 on any failure
        pass
    finally:
        # BUGFIX: always release the handle, even if getinfo() itself raises.
        curl.close()
    return reply
def __init__(self, url_results):
    """Build one CurlStub per URL.

    *url_results* maps url -> body, or url -> (body, http_code); a bare
    body implies HTTP 200.
    """
    self.curls = {}
    for url, outcome in url_results.items():
        if isinstance(outcome, tuple):
            body, http_code = outcome[0], outcome[1]
        else:
            body, http_code = outcome, 200
        self.curls[url] = CurlStub(
            result=body, infos={pycurl.HTTP_CODE: http_code})
    # Thread-local storage: this object is shared across threads, but the
    # "current" stub must remain per-thread state.
    self._local = local()
    self._local.current = None
def fetch(self, request, **kwargs):
    """Executes an HTTPRequest, returning an HTTPResponse.

    If an error occurs during the fetch, we raise an HTTPError.
    """
    if not isinstance(request, HTTPRequest):
        request = HTTPRequest(url=request, **kwargs)
    buffer = cStringIO.StringIO()
    headers = httputil.HTTPHeaders()
    try:
        _curl_setup_request(self._curl, request, buffer, headers)
        self._curl.perform()
        code = self._curl.getinfo(pycurl.HTTP_CODE)
        effective_url = self._curl.getinfo(pycurl.EFFECTIVE_URL)
        buffer.seek(0)
        response = HTTPResponse(
            request=request, code=code, headers=headers,
            buffer=buffer, effective_url=effective_url)
        # treat anything outside 2xx as an error
        if code < 200 or code >= 300:
            raise HTTPError(code, response=response)
        return response
    except pycurl.error as e:
        # BUGFIX: "except pycurl.error, e:" is Python-2-only syntax and
        # "CurlError(*e)" relied on py2 exception iteration; "as e" plus
        # e.args (an (errno, message) pair) works on Python 2.6+ and 3.x.
        buffer.close()
        raise CurlError(*e.args)
def _finish(self, curl, curl_error=None, curl_message=None):
    """Finalize a completed (or failed) curl transfer and fire its callback."""
    meta = curl.info
    curl.info = None
    self._multi.remove_handle(curl)
    self._free_list.append(curl)
    body_buffer = meta["buffer"]
    if curl_error:
        err = CurlError(curl_error, curl_message)
        status = err.code
        final_url = None
        body_buffer.close()
        body_buffer = None
    else:
        err = None
        status = curl.getinfo(pycurl.HTTP_CODE)
        final_url = curl.getinfo(pycurl.EFFECTIVE_URL)
        body_buffer.seek(0)
    # the various curl timings are documented at
    # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
    timings = {
        "queue": meta["curl_start_time"] - meta["request"].start_time,
        "namelookup": curl.getinfo(pycurl.NAMELOOKUP_TIME),
        "connect": curl.getinfo(pycurl.CONNECT_TIME),
        "pretransfer": curl.getinfo(pycurl.PRETRANSFER_TIME),
        "starttransfer": curl.getinfo(pycurl.STARTTRANSFER_TIME),
        "total": curl.getinfo(pycurl.TOTAL_TIME),
        "redirect": curl.getinfo(pycurl.REDIRECT_TIME),
    }
    try:
        meta["callback"](HTTPResponse(
            request=meta["request"], code=status, headers=meta["headers"],
            buffer=body_buffer, effective_url=final_url, error=err,
            reason=meta["headers"].get("X-Http-Reason", None),
            request_time=time.time() - meta["curl_start_time"],
            time_info=timings))
    except Exception:
        self.handle_callback_exception(meta["callback"])
def __init__(self, result=None, infos=None, error=None):
    """A fake curl handle; defaults to an HTTP 200 when no infos are given."""
    self.result = result
    self.infos = {pycurl.HTTP_CODE: 200} if infos is None else infos
    self.options = {}
    self.performed = False
    self.error = error
def test_non_200_result(self):
    """A non-2xx status from the stub must surface as HTTPCodeError."""
    stub = CurlStub(b"result", {pycurl.HTTP_CODE: 404})
    with self.assertRaises(HTTPCodeError) as ctx:
        fetch("http://example.com", curl=stub)
    self.assertEqual(ctx.exception.http_code, 404)
    self.assertEqual(ctx.exception.body, b"result")
def test_async_fetch_with_error(self):
    """An async fetch of a 501 response must fail with HTTPCodeError."""
    stub = CurlStub(b"result", {pycurl.HTTP_CODE: 501})
    deferred = fetch_async("http://example.com/", curl=stub)

    def check_failure(failure):
        self.assertEqual(failure.value.http_code, 501)
        self.assertEqual(failure.value.body, b"result")
        return failure

    deferred.addErrback(check_failure)
    self.assertFailure(deferred, HTTPCodeError)
    return deferred
def _finish(self, curl, curl_error=None, curl_message=None):
    """Tear down a finished transfer, recycle the handle, invoke the callback."""
    meta = curl.info
    curl.info = None
    self._multi.remove_handle(curl)
    self._free_list.append(curl)
    body_buffer = meta["buffer"]
    if curl_error:
        err = CurlError(curl_error, curl_message)
        status = err.code
        final_url = None
        body_buffer.close()
        body_buffer = None
    else:
        err = None
        status = curl.getinfo(pycurl.HTTP_CODE)
        final_url = curl.getinfo(pycurl.EFFECTIVE_URL)
        body_buffer.seek(0)
    # the various curl timings are documented at
    # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
    timings = {
        "queue": meta["curl_start_time"] - meta["request"].start_time,
        "namelookup": curl.getinfo(pycurl.NAMELOOKUP_TIME),
        "connect": curl.getinfo(pycurl.CONNECT_TIME),
        "pretransfer": curl.getinfo(pycurl.PRETRANSFER_TIME),
        "starttransfer": curl.getinfo(pycurl.STARTTRANSFER_TIME),
        "total": curl.getinfo(pycurl.TOTAL_TIME),
        "redirect": curl.getinfo(pycurl.REDIRECT_TIME),
    }
    try:
        meta["callback"](HTTPResponse(
            request=meta["request"], code=status, headers=meta["headers"],
            buffer=body_buffer, effective_url=final_url, error=err,
            request_time=time.time() - meta["curl_start_time"],
            time_info=timings))
    except Exception:
        self.handle_callback_exception(meta["callback"])
def _finish(self, curl, curl_error=None, curl_message=None):
    """Finalize a curl transfer: recycle the handle and invoke its callback.

    On a curl-level error the buffer is discarded and an error response is
    delivered; otherwise the HTTP code and effective URL are read back.
    """
    info = curl.info
    curl.info = None
    self._multi.remove_handle(curl)
    self._free_list.append(curl)
    buffer = info["buffer"]
    if curl_error:
        error = CurlError(curl_error, curl_message)
        code = error.code
        # BUGFIX: removed unused local "body = None" (never read).
        effective_url = None
        buffer.close()
        buffer = None
    else:
        error = None
        code = curl.getinfo(pycurl.HTTP_CODE)
        effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
        buffer.seek(0)
    try:
        info["callback"](HTTPResponse(
            request=info["request"], code=code, headers=info["headers"],
            buffer=buffer, effective_url=effective_url, error=error,
            request_time=time.time() - info["start_time"]))
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        # BUGFIX: bare "except:" replaced with "except Exception:"; it would
        # also have swallowed GeneratorExit and other BaseExceptions.
        logging.error("Exception in callback %r", info["callback"],
                      exc_info=True)
def _finish(self, curl, curl_error=None, curl_message=None):
    """Finalize a curl transfer: recycle the handle and invoke its callback.

    On a curl-level error the buffer is discarded and an error response is
    delivered; otherwise the HTTP code and effective URL are read back.
    """
    info = curl.info
    curl.info = None
    self._multi.remove_handle(curl)
    self._free_list.append(curl)
    buffer = info["buffer"]
    if curl_error:
        error = CurlError(curl_error, curl_message)
        code = error.code
        effective_url = None
        buffer.close()
        buffer = None
    else:
        error = None
        code = curl.getinfo(pycurl.HTTP_CODE)
        effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
        buffer.seek(0)
    try:
        info["callback"](HTTPResponse(
            request=info["request"], code=code, headers=info["headers"],
            buffer=buffer, effective_url=effective_url, error=error,
            request_time=time.time() - info["start_time"]))
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        # BUGFIX: bare "except:" replaced with "except Exception:"; it would
        # also have swallowed GeneratorExit and other BaseExceptions.
        logging.error("Exception in callback %r", info["callback"],
                      exc_info=True)
def query(url):
    """Fetch *url* through the local Tor SOCKS5 proxy and return the body.

    Renews the Tor circuit (renew_tor) and retries whenever the response is
    not HTTP 200.
    NOTE(review): this retries forever on persistently failing URLs --
    consider bounding the number of attempts.
    """
    output = io.BytesIO()
    query = pycurl.Curl()
    query.setopt(pycurl.URL, url)
    query.setopt(pycurl.HTTPHEADER, getHeaders())
    query.setopt(pycurl.PROXY, 'localhost')
    query.setopt(pycurl.PROXYPORT, SOCKS_PORT)
    # SOCKS5_HOSTNAME: resolve DNS through the proxy, not locally
    query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
    query.setopt(pycurl.WRITEFUNCTION, output.write)
    through = False
    while not through:
        try:
            query.perform()
            http_code = query.getinfo(pycurl.HTTP_CODE)
            if http_code == 200:
                through = True
            else:
                # BUGFIX: "print 'x'" statements are Python-2-only syntax;
                # single-argument print() works on both 2 and 3.
                print('error httpcode:' + str(http_code))
                renew_tor()  # rotate the exit node, then retry
        except pycurl.error as exc:
            print("pycurl error in tor.py %s" % exc)
    return output.getvalue()
def info(c):
    "Return a dictionary with all info on the last response."
    # Table of (report key, pycurl getinfo constant); fields the original
    # author had disabled (upload/ssl/auth/cookie stats) are simply omitted.
    fields = (
        ('effective-url', pycurl.EFFECTIVE_URL),
        ('http-code', pycurl.HTTP_CODE),
        ('total-time', pycurl.TOTAL_TIME),
        ('namelookup-time', pycurl.NAMELOOKUP_TIME),
        ('connect-time', pycurl.CONNECT_TIME),
        ('pretransfer-time', pycurl.PRETRANSFER_TIME),
        ('redirect-time', pycurl.REDIRECT_TIME),
        ('redirect-count', pycurl.REDIRECT_COUNT),
        ('size-download', pycurl.SIZE_DOWNLOAD),
        ('header-size', pycurl.HEADER_SIZE),
        ('request-size', pycurl.REQUEST_SIZE),
        ('content-length-download', pycurl.CONTENT_LENGTH_DOWNLOAD),
        ('content-length-upload', pycurl.CONTENT_LENGTH_UPLOAD),
        ('content-type', pycurl.CONTENT_TYPE),
        ('response-code', pycurl.RESPONSE_CODE),
        ('speed-download', pycurl.SPEED_DOWNLOAD),
        ('filetime', pycurl.INFO_FILETIME),
        ('starttransfer-time', pycurl.STARTTRANSFER_TIME),
        ('http-connectcode', pycurl.HTTP_CONNECTCODE),
        ('num-connects', pycurl.NUM_CONNECTS),
    )
    return {name: c.getinfo(const) for name, const in fields}
def info(self):
    "Return a dictionary with all info on the last response."
    # Table of (report key, pycurl getinfo constant), queried in one pass.
    fields = (
        ('effective-url', pycurl.EFFECTIVE_URL),
        ('http-code', pycurl.HTTP_CODE),
        ('total-time', pycurl.TOTAL_TIME),
        ('namelookup-time', pycurl.NAMELOOKUP_TIME),
        ('connect-time', pycurl.CONNECT_TIME),
        ('pretransfer-time', pycurl.PRETRANSFER_TIME),
        ('redirect-time', pycurl.REDIRECT_TIME),
        ('redirect-count', pycurl.REDIRECT_COUNT),
        ('size-upload', pycurl.SIZE_UPLOAD),
        ('size-download', pycurl.SIZE_DOWNLOAD),
        ('speed-upload', pycurl.SPEED_UPLOAD),
        ('header-size', pycurl.HEADER_SIZE),
        ('request-size', pycurl.REQUEST_SIZE),
        ('content-length-download', pycurl.CONTENT_LENGTH_DOWNLOAD),
        ('content-length-upload', pycurl.CONTENT_LENGTH_UPLOAD),
        ('content-type', pycurl.CONTENT_TYPE),
        ('response-code', pycurl.RESPONSE_CODE),
        ('speed-download', pycurl.SPEED_DOWNLOAD),
        ('ssl-verifyresult', pycurl.SSL_VERIFYRESULT),
        ('filetime', pycurl.INFO_FILETIME),
        ('starttransfer-time', pycurl.STARTTRANSFER_TIME),
        ('http-connectcode', pycurl.HTTP_CONNECTCODE),
        ('httpauth-avail', pycurl.HTTPAUTH_AVAIL),
        ('proxyauth-avail', pycurl.PROXYAUTH_AVAIL),
        ('os-errno', pycurl.OS_ERRNO),
        ('num-connects', pycurl.NUM_CONNECTS),
        ('ssl-engines', pycurl.SSL_ENGINES),
        ('cookielist', pycurl.INFO_COOKIELIST),
        ('lastsocket', pycurl.LASTSOCKET),
        ('ftp-entry-path', pycurl.FTP_ENTRY_PATH),
    )
    return {name: self.handle.getinfo(const) for name, const in fields}
def load_url(url, token, shape=(8, 256, 256)):
    """Loads a geotiff url inside a thread and returns as an ndarray.

    Retries up to MAX_RETRIES times; on total failure returns an array of
    zeros of the requested *shape* (dtype float32).
    """
    _, ext = os.path.splitext(urlparse(url).path)
    success = False
    # BUGFIX: xrange is Python-2-only; range works on both.  The loop index
    # was unused, so name it "_attempt".
    for _attempt in range(MAX_RETRIES):
        thread_id = threading.current_thread().ident
        _curl = _curl_pool[thread_id]
        _curl.setopt(_curl.URL, url)
        _curl.setopt(pycurl.NOSIGNAL, 1)
        _curl.setopt(pycurl.HTTPHEADER,
                     ['Authorization: Bearer {}'.format(token)])
        with NamedTemporaryFile(prefix="gbdxtools", suffix=ext,
                                delete=False) as temp:
            _curl.setopt(_curl.WRITEDATA, temp.file)
            _curl.perform()
            code = _curl.getinfo(pycurl.HTTP_CODE)
            try:
                if code != 200:
                    raise TypeError(
                        "Request for {} returned unexpected error code: {}"
                        .format(url, code))
                temp.file.flush()
                temp.close()
                with rasterio.open(temp.name) as dataset:
                    arr = dataset.read()
                success = True
                return arr
            except (TypeError, RasterioIOError) as e:
                print(e)
                # drop the (possibly broken) handle so the pool makes a new one
                _curl.close()
                del _curl_pool[thread_id]
            finally:
                temp.close()
                os.remove(temp.name)
    if not success:
        arr = np.zeros(shape, dtype=np.float32)
        return arr
def load_url(url, shape=(8, 256, 256)):
    """Loads a geotiff url inside a thread and returns as an ndarray.

    On any download/read failure returns an array of zeros of the requested
    *shape* (dtype uint8) after dumping the response body for debugging.
    """
    thread_id = threading.current_thread().ident
    _curl = _curl_pool[thread_id]
    _curl.setopt(_curl.URL, url)
    _curl.setopt(pycurl.NOSIGNAL, 1)
    _, ext = os.path.splitext(urlparse(url).path)
    # BUGFIX: os.path.splitext already returns the extension WITH its leading
    # dot, so the original suffix="."+ext produced names like "xyz..tif".
    with NamedTemporaryFile(prefix="gbdxtools", suffix=ext,
                            delete=False) as temp:
        _curl.setopt(_curl.WRITEDATA, temp.file)
        _curl.perform()
        code = _curl.getinfo(pycurl.HTTP_CODE)
        try:
            if code != 200:
                raise TypeError(
                    "Request for {} returned unexpected error code: {}"
                    .format(url, code))
            temp.file.flush()
            temp.close()
            with rasterio.open(temp.name) as dataset:
                arr = dataset.read()
        except (TypeError, RasterioIOError) as e:
            print(e)
            temp.seek(0)
            print(temp.read())
            arr = np.zeros(shape, dtype=np.uint8)
            # drop the (possibly broken) handle so the pool makes a new one
            _curl.close()
            del _curl_pool[thread_id]
        finally:
            temp.close()
            os.remove(temp.name)
    return arr
def extract_response(self, curl_error_code=None, curl_error_message=None):
    """Collect body, status, timings and certificate info from the finished
    handle, close buffer and handle, and store a ResponseContainer on
    self.response."""
    body = self._response_buffer.getvalue()
    status_code = self.curl_handler.getinfo(pycurl.HTTP_CODE)
    content_type = self.curl_handler.getinfo(pycurl.CONTENT_TYPE)
    # per-phase timings
    timing_constants = (
        ("TOTAL_TIME", pycurl.TOTAL_TIME),
        ("NAMELOOKUP_TIME", pycurl.NAMELOOKUP_TIME),
        ("CONNECT_TIME", pycurl.CONNECT_TIME),
        ("APPCONNECT_TIME", pycurl.APPCONNECT_TIME),
        ("PRETRANSFER_TIME", pycurl.PRETRANSFER_TIME),
        ("STARTTRANSFER_TIME", pycurl.STARTTRANSFER_TIME),
        ("REDIRECT_TIME", pycurl.REDIRECT_TIME),
        ("REDIRECT_COUNT", pycurl.REDIRECT_COUNT),
    )
    timings = {name: self.curl_handler.getinfo(const)
               for name, const in timing_constants}
    # certificate chain, one dict per cert (each cert is (key, value) pairs)
    try:
        dictcertinfo = [
            dict(cert)
            for cert in self.curl_handler.getinfo(pycurl.INFO_CERTINFO)]
    except UnicodeDecodeError:
        # FIXME ? triggered by curl_handler.getinfo(...)
        dictcertinfo = []
    # release resources before handing the data over
    self._response_buffer.close()
    self.curl_handler.close()
    self.response = ResponseContainer(
        self.url, dictcertinfo, status_code, curl_error_code,
        curl_error_message, body, content_type, timings)
def curl_result(c):
    """Snapshot the interesting getinfo() fields of a finished curl handle
    into a dict keyed by lowercase field name."""
    fields = (
        ('effective_url', pycurl.EFFECTIVE_URL),
        ('primary_ip', pycurl.PRIMARY_IP),
        ('primary_port', pycurl.PRIMARY_PORT),
        ('local_ip', pycurl.LOCAL_IP),
        ('local_port', pycurl.LOCAL_PORT),
        ('speed_download', pycurl.SPEED_DOWNLOAD),
        ('size_download', pycurl.SIZE_DOWNLOAD),
        ('redirect_time', pycurl.REDIRECT_TIME),
        ('redirect_count', pycurl.REDIRECT_COUNT),
        ('redirect_url', pycurl.REDIRECT_URL),
        ('http_code', pycurl.HTTP_CODE),
        ('response_code', pycurl.RESPONSE_CODE),
        ('total_time', pycurl.TOTAL_TIME),
        ('content_type', pycurl.CONTENT_TYPE),
        ('namelookup_time', pycurl.NAMELOOKUP_TIME),
        ('info_filetime', pycurl.INFO_FILETIME),
        ('http_connectcode', pycurl.HTTP_CONNECTCODE),
        ('starttransfer_time', pycurl.STARTTRANSFER_TIME),
        ('pretransfer_time', pycurl.PRETRANSFER_TIME),
        ('header_size', pycurl.HEADER_SIZE),
        ('request_size', pycurl.REQUEST_SIZE),
        ('ssl_verifyresult', pycurl.SSL_VERIFYRESULT),
        ('num_connects', pycurl.NUM_CONNECTS),
        # 'proxy_ssl_verifyresult' and 'app_connecttime' intentionally omitted,
        # as in the original.
    )
    return {name: c.getinfo(const) for name, const in fields}