The following 44 code examples, extracted from open-source Python projects, illustrate how to use pycurl.FOLLOWLOCATION.
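Before the extracted examples, here is a minimal, self-contained sketch of the option in isolation (not taken from any of the projects below; the URL is only a placeholder): FOLLOWLOCATION makes libcurl follow HTTP 3xx redirects automatically, and it is usually paired with MAXREDIRS to bound the redirect chain.

# Minimal sketch; http://example.com is a placeholder URL.
import pycurl
from io import BytesIO

buf = BytesIO()
c = pycurl.Curl()
c.setopt(pycurl.URL, "http://example.com")   # placeholder URL
c.setopt(pycurl.FOLLOWLOCATION, True)        # follow 3xx redirects automatically
c.setopt(pycurl.MAXREDIRS, 5)                # bound the redirect chain
c.setopt(pycurl.WRITEFUNCTION, buf.write)    # collect the response body
c.perform()
print(c.getinfo(pycurl.EFFECTIVE_URL))       # final URL after any redirects
print(c.getinfo(pycurl.REDIRECT_COUNT))      # number of redirects followed
c.close()

Note that several of the examples below deliberately set FOLLOWLOCATION to 0 or False, either to inspect redirects manually or to fetch only headers.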
def request(self, endpoint, post=None):
    buffer = BytesIO()
    ch = pycurl.Curl()
    ch.setopt(pycurl.URL, Constants.API_URL + endpoint)
    ch.setopt(pycurl.USERAGENT, self.userAgent)
    ch.setopt(pycurl.WRITEFUNCTION, buffer.write)
    ch.setopt(pycurl.FOLLOWLOCATION, True)
    ch.setopt(pycurl.HEADER, True)
    ch.setopt(pycurl.VERBOSE, False)
    ch.setopt(pycurl.COOKIEFILE, os.path.join(self.IGDataPath, self.username, self.username + "-cookies.dat"))
    ch.setopt(pycurl.COOKIEJAR, os.path.join(self.IGDataPath, self.username, self.username + "-cookies.dat"))

    if post is not None:
        ch.setopt(pycurl.POST, True)
        ch.setopt(pycurl.POSTFIELDS, post)

    if self.proxy:
        ch.setopt(pycurl.PROXY, self.proxyHost)
        if self.proxyAuth:
            ch.setopt(pycurl.PROXYUSERPWD, self.proxyAuth)

    ch.perform()
    resp = buffer.getvalue()
    header_len = ch.getinfo(pycurl.HEADER_SIZE)
    header = resp[0:header_len]
    body = resp[header_len:]
    ch.close()

    if self.debug:
        print("REQUEST: " + endpoint)
        if post is not None:
            if not isinstance(post, list):
                print("DATA: " + str(post))
        print("RESPONSE: " + body)

    return [header, json_decode(body)]
def get_connection():
    # pycurl initialization
    h = pycurl.Curl()

    # follow redirects
    h.setopt(pycurl.FOLLOWLOCATION, False)

    # enable compression
    h.setopt(pycurl.ENCODING, 'gzip, deflate')

    # certifi
    h.setopt(pycurl.CAINFO, certifi.where())

    # no signal
    h.setopt(pycurl.NOSIGNAL, 1)

    # certificate informations
    h.setopt(pycurl.OPT_CERTINFO, 1)

    return h
def test_post(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com", post=True, curl=curl)
    self.assertEqual(result, b"result")
    self.assertEqual(curl.options,
                     {pycurl.URL: b"http://example.com",
                      pycurl.FOLLOWLOCATION: 1,
                      pycurl.MAXREDIRS: 5,
                      pycurl.CONNECTTIMEOUT: 30,
                      pycurl.LOW_SPEED_LIMIT: 1,
                      pycurl.LOW_SPEED_TIME: 600,
                      pycurl.NOSIGNAL: 1,
                      pycurl.WRITEFUNCTION: Any(),
                      pycurl.POST: True,
                      pycurl.DNS_CACHE_TIMEOUT: 0,
                      pycurl.ENCODING: b"gzip,deflate"})
def test_post_data(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com", post=True, data="data", curl=curl)
    self.assertEqual(result, b"result")
    self.assertEqual(curl.options[pycurl.READFUNCTION](), b"data")
    self.assertEqual(curl.options,
                     {pycurl.URL: b"http://example.com",
                      pycurl.FOLLOWLOCATION: 1,
                      pycurl.MAXREDIRS: 5,
                      pycurl.CONNECTTIMEOUT: 30,
                      pycurl.LOW_SPEED_LIMIT: 1,
                      pycurl.LOW_SPEED_TIME: 600,
                      pycurl.NOSIGNAL: 1,
                      pycurl.WRITEFUNCTION: Any(),
                      pycurl.POST: True,
                      pycurl.POSTFIELDSIZE: 4,
                      pycurl.READFUNCTION: Any(),
                      pycurl.DNS_CACHE_TIMEOUT: 0,
                      pycurl.ENCODING: b"gzip,deflate"})
def test_cainfo(self):
    curl = CurlStub(b"result")
    result = fetch("https://example.com", cainfo="cainfo", curl=curl)
    self.assertEqual(result, b"result")
    self.assertEqual(curl.options,
                     {pycurl.URL: b"https://example.com",
                      pycurl.FOLLOWLOCATION: 1,
                      pycurl.MAXREDIRS: 5,
                      pycurl.CONNECTTIMEOUT: 30,
                      pycurl.LOW_SPEED_LIMIT: 1,
                      pycurl.LOW_SPEED_TIME: 600,
                      pycurl.NOSIGNAL: 1,
                      pycurl.WRITEFUNCTION: Any(),
                      pycurl.CAINFO: b"cainfo",
                      pycurl.DNS_CACHE_TIMEOUT: 0,
                      pycurl.ENCODING: b"gzip,deflate"})
def test_headers(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com", headers={"a": "1", "b": "2"}, curl=curl)
    self.assertEqual(result, b"result")
    self.assertEqual(curl.options,
                     {pycurl.URL: b"http://example.com",
                      pycurl.FOLLOWLOCATION: 1,
                      pycurl.MAXREDIRS: 5,
                      pycurl.CONNECTTIMEOUT: 30,
                      pycurl.LOW_SPEED_LIMIT: 1,
                      pycurl.LOW_SPEED_TIME: 600,
                      pycurl.NOSIGNAL: 1,
                      pycurl.WRITEFUNCTION: Any(),
                      pycurl.HTTPHEADER: ["a: 1", "b: 2"],
                      pycurl.DNS_CACHE_TIMEOUT: 0,
                      pycurl.ENCODING: b"gzip,deflate"})
def test_pycurl_insecure(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com/get-ca-cert", curl=curl, insecure=True)
    self.assertEqual(result, b"result")
    self.assertEqual(curl.options,
                     {pycurl.URL: b"http://example.com/get-ca-cert",
                      pycurl.FOLLOWLOCATION: 1,
                      pycurl.MAXREDIRS: 5,
                      pycurl.CONNECTTIMEOUT: 30,
                      pycurl.LOW_SPEED_LIMIT: 1,
                      pycurl.LOW_SPEED_TIME: 600,
                      pycurl.NOSIGNAL: 1,
                      pycurl.WRITEFUNCTION: Any(),
                      pycurl.SSL_VERIFYPEER: False,
                      pycurl.DNS_CACHE_TIMEOUT: 0,
                      pycurl.ENCODING: b"gzip,deflate"})
def initHandle(self):
    """ sets common options to curl handle """
    self.c.setopt(pycurl.FOLLOWLOCATION, 1)
    self.c.setopt(pycurl.MAXREDIRS, 5)
    self.c.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.c.setopt(pycurl.NOSIGNAL, 1)
    self.c.setopt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        self.c.setopt(pycurl.AUTOREFERER, 1)
    self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
    self.c.setopt(pycurl.LOW_SPEED_TIME, 30)
    self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    #self.c.setopt(pycurl.VERBOSE, 1)

    self.c.setopt(pycurl.USERAGENT,
                  "Mozilla/5.0 (Windows NT 6.1; Win64; x64;en; rv:5.0) Gecko/20110619 Firefox/5.0")
    if pycurl.version_info()[7]:
        self.c.setopt(pycurl.ENCODING, "gzip, deflate")
    self.c.setopt(pycurl.HTTPHEADER, ["Accept: */*",
                                      "Accept-Language: en-US,en",
                                      "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
                                      "Connection: keep-alive",
                                      "Keep-Alive: 300",
                                      "Expect:"])
def __init__(self, p_request, p_timeoutMs=1000, p_curlOpts=None):
    if isinstance(p_request, str):
        p_request = HTTPRequest(p_url=p_request)
    self.m_request = p_request
    self.m_timeoutMs = p_timeoutMs
    self.m_response = None
    self.m_handle = None
    self.m_data = None
    self.m_headers = None
    self.m_handle = pycurl.Curl()
    self.m_opts = p_curlOpts
    if p_curlOpts is None:
        self.m_opts = {}
    self.cleanup()
    self._init_opt()
    self._init_url()
    self._init_method()
    self._init_headers()
    self.m_handle.setopt(pycurl.USERAGENT, self.m_request.m_agent)
    self.m_handle.setopt(pycurl.HEADERFUNCTION, self._read_header)
    if self.m_timeoutMs:
        self.m_handle.setopt(pycurl.TIMEOUT_MS, self.m_timeoutMs)
    self.m_handle.setopt(pycurl.FOLLOWLOCATION, True)
def _get_url(self, url):
    if self.API_TOKEN == None:
        logging.error('none token')  # 3 For ERROR level
        return
    try:
        c = pycurl.Curl()
        c.setopt(pycurl.CAINFO, certifi.where())
        c.setopt(pycurl.URL, url)
        b = StringIO.StringIO()
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        c.setopt(pycurl.USERAGENT, "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)")
        c.setopt(pycurl.HTTPHEADER, ['Authorization: JWT %s' % self.API_TOKEN.encode()])
        c.setopt(pycurl.CUSTOMREQUEST, "GET")
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.perform()
        result = b.getvalue()
        logging.debug('result')
    except Exception as e:
        logging.error(e.message)
        logging.error('go error')
        pass
    return result
def Curl(url, headers):
    while 1:
        try:
            c = pycurl.Curl()
            c.setopt(pycurl.REFERER, 'http://weixin.sogou.com/')
            c.setopt(pycurl.FOLLOWLOCATION, True)
            c.setopt(pycurl.MAXREDIRS, 5)
            c.setopt(pycurl.CONNECTTIMEOUT, 60)
            c.setopt(pycurl.TIMEOUT, 120)
            c.setopt(pycurl.ENCODING, 'gzip,deflate')
            c.fp = StringIO.StringIO()
            c.setopt(pycurl.URL, url)
            c.setopt(pycurl.HTTPHEADER, headers)
            c.setopt(c.WRITEFUNCTION, c.fp.write)
            c.perform()
            html = c.fp.getvalue()
            if '??????' in html:
                print u'??????,??10??'
                time.sleep(600)
            else:
                return html
        except Exception, e:
            print url, 'curl(url)', e
            continue
def getHtml(url, headers):
    c = pycurl.Curl()                          # create a curl handle
    c.setopt(pycurl.URL, url)                  # URL to request
    c.setopt(pycurl.FOLLOWLOCATION, True)      # follow redirects
    c.setopt(pycurl.MAXREDIRS, 5)              # maximum number of redirects
    c.setopt(pycurl.CONNECTTIMEOUT, 60)        # connect timeout
    c.setopt(pycurl.TIMEOUT, 120)              # total request timeout
    c.setopt(pycurl.ENCODING, 'gzip,deflate')  # accept gzip/deflate compressed responses
    c.fp = StringIO.StringIO()                 # buffer for the response body
    c.setopt(pycurl.HTTPHEADER, headers)       # request headers
    c.setopt(pycurl.POST, 1)                   # send a POST request
    c.setopt(pycurl.POSTFIELDS, data)          # POST body (`data` comes from the enclosing scope)
    c.setopt(c.WRITEFUNCTION, c.fp.write)      # write the response into the buffer
    c.perform()                                # execute the request
    html = c.fp.getvalue()                     # read the response body
    return html
def curl(url, debug=False, **kwargs):
    while 1:
        try:
            s = StringIO.StringIO()
            c = pycurl.Curl()
            c.setopt(pycurl.URL, url)
            c.setopt(pycurl.REFERER, url)
            c.setopt(pycurl.FOLLOWLOCATION, True)
            c.setopt(pycurl.TIMEOUT, 60)
            c.setopt(pycurl.ENCODING, 'gzip')
            c.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36')
            c.setopt(pycurl.NOSIGNAL, True)
            c.setopt(pycurl.WRITEFUNCTION, s.write)
            for k, v in kwargs.iteritems():
                c.setopt(vars(pycurl)[k], v)
            c.perform()
            c.close()
            return s.getvalue()
        except:
            if debug:
                raise
            continue
def searchIP(self, query, pages, queue, STOP_ME):
    if self.API_TOKEN == None:
        print "please config your API_TOKEN"
        sys.exit()
    for page in range(1, pages+1):
        b = StringIO.StringIO()
        c = pycurl.Curl()
        c.setopt(pycurl.URL, "%s?query=%s&page=%s" % (self.API_URL, query, page))
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.setopt(pycurl.CUSTOMREQUEST, "GET")
        c.setopt(pycurl.HTTPHEADER, ['Authorization: JWT %s' % self.API_TOKEN.encode()])
        c.perform()
        hosts = json.loads(b.getvalue())
        for host in hosts['matches']:
            queue.put(host["ip"])
    STOP_ME[0] = True
def get(url, user_agent=UA, referrer=None):
    """Make a GET request of the url using pycurl and return the data
    (which is None if unsuccessful)"""
    data = None
    databuffer = StringIO()

    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)
    curl.setopt(pycurl.TIMEOUT, 8)
    curl.setopt(pycurl.WRITEFUNCTION, databuffer.write)
    curl.setopt(pycurl.COOKIEFILE, '')
    if user_agent:
        curl.setopt(pycurl.USERAGENT, user_agent)
    if referrer is not None:
        curl.setopt(pycurl.REFERER, referrer)
    try:
        curl.perform()
        data = databuffer.getvalue()
    except Exception:
        pass
    curl.close()

    return data
def request(self, endpoint, headers=None, post=None, first=True):
    buffer = BytesIO()
    ch = pycurl.Curl()
    ch.setopt(pycurl.URL, endpoint)
    ch.setopt(pycurl.USERAGENT, self.userAgent)
    ch.setopt(pycurl.WRITEFUNCTION, buffer.write)
    ch.setopt(pycurl.FOLLOWLOCATION, True)
    ch.setopt(pycurl.HEADER, True)
    if headers:
        ch.setopt(pycurl.HTTPHEADER, headers)
    ch.setopt(pycurl.VERBOSE, self.debug)
    ch.setopt(pycurl.SSL_VERIFYPEER, False)
    ch.setopt(pycurl.SSL_VERIFYHOST, False)
    ch.setopt(pycurl.COOKIEFILE, self.settingsPath + self.username + '-cookies.dat')
    ch.setopt(pycurl.COOKIEJAR, self.settingsPath + self.username + '-cookies.dat')

    if post:
        import urllib
        ch.setopt(pycurl.POST, len(post))
        ch.setopt(pycurl.POSTFIELDS, urllib.urlencode(post))

    ch.perform()
    resp = buffer.getvalue()
    header_len = ch.getinfo(pycurl.HEADER_SIZE)
    header = resp[0:header_len]
    body = resp[header_len:]
    ch.close()

    if self.debug:
        import urllib
        print("REQUEST: " + endpoint)
        if post is not None:
            if not isinstance(post, list):
                print('DATA: ' + urllib.unquote_plus(json.dumps(post)))
        print("RESPONSE: " + body + "\n")

    return [header, json_decode(body)]
def __init__(self, api_cookies, web_cookies, cipher=None,
             api_args={'api': '1'},
             api_url='https://ceiba.ntu.edu.tw/course/f03067/app/login.php',
             file_url='https://ceiba.ntu.edu.tw',
             web_url='https://ceiba.ntu.edu.tw'):
    self.logger = logging.getLogger(__name__)
    self.curl = pycurl.Curl()
    self.api_cookie = ';'.join(map(lambda x: '{}={}'.format(*x), api_cookies.items()))
    self.web_cookie = ';'.join(map(lambda x: '{}={}'.format(*x), web_cookies.items()))
    self.api_args = api_args
    self.api_url = api_url
    self.file_url = file_url
    self.web_url = web_url
    self.api_cache = None
    self.web_cache = dict()
    if not cipher:
        tls_backend = pycurl.version_info()[5].split('/')[0]
        if tls_backend == 'OpenSSL':
            cipher = 'AES128-SHA'
        elif tls_backend == 'GnuTLS':
            cipher = 'AES128-SHA'
        elif tls_backend == 'NSS':
            cipher = 'rsa_aes_128_sha'
        else:
            assert False, 'unsupported TLS backend {}'.format(tls_backend)
    self.curl.setopt(pycurl.USE_SSL, pycurl.USESSL_ALL)
    self.curl.setopt(pycurl.SSL_CIPHER_LIST, cipher)
    self.curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTPS)
    self.curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTPS)
    self.curl.setopt(pycurl.DEFAULT_PROTOCOL, 'https')
    self.curl.setopt(pycurl.FOLLOWLOCATION, False)
def test_basic(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com", curl=curl)
    self.assertEqual(result, b"result")
    self.assertEqual(curl.options,
                     {pycurl.URL: b"http://example.com",
                      pycurl.FOLLOWLOCATION: 1,
                      pycurl.MAXREDIRS: 5,
                      pycurl.CONNECTTIMEOUT: 30,
                      pycurl.LOW_SPEED_LIMIT: 1,
                      pycurl.LOW_SPEED_TIME: 600,
                      pycurl.NOSIGNAL: 1,
                      pycurl.WRITEFUNCTION: Any(),
                      pycurl.DNS_CACHE_TIMEOUT: 0,
                      pycurl.ENCODING: b"gzip,deflate"})
def test_pycurl_follow_true(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com", curl=curl, follow=True)
    self.assertEqual(result, b"result")
    self.assertEqual(1, curl.options[pycurl.FOLLOWLOCATION])
def test_pycurl_follow_false(self):
    curl = CurlStub(b"result")
    result = fetch("http://example.com", curl=curl, follow=False)
    self.assertEqual(result, b"result")
    self.assertNotIn(pycurl.FOLLOWLOCATION, curl.options.keys())
def test_create_curl(self):
    curls = []

    def pycurl_Curl():
        curl = CurlStub(b"result")
        curls.append(curl)
        return curl

    Curl = pycurl.Curl
    try:
        pycurl.Curl = pycurl_Curl
        result = fetch("http://example.com")
        curl = curls[0]
        self.assertEqual(result, b"result")
        self.assertEqual(curl.options,
                         {pycurl.URL: b"http://example.com",
                          pycurl.FOLLOWLOCATION: 1,
                          pycurl.MAXREDIRS: 5,
                          pycurl.CONNECTTIMEOUT: 30,
                          pycurl.LOW_SPEED_LIMIT: 1,
                          pycurl.LOW_SPEED_TIME: 600,
                          pycurl.NOSIGNAL: 1,
                          pycurl.WRITEFUNCTION: Any(),
                          pycurl.DNS_CACHE_TIMEOUT: 0,
                          pycurl.ENCODING: b"gzip,deflate"})
    finally:
        pycurl.Curl = Curl
def get(self, URL, FOLLOWLOCATION=False):
    self.URL = URL
    buffer = StringIO()
    c = pycurl.Curl()
    c.setopt(c.URL, URL)
    c.setopt(c.WRITEDATA, buffer)
    c.setopt(pycurl.FOLLOWLOCATION, FOLLOWLOCATION)
    c.perform()
    c.close()
    return buffer.getvalue().replace('\r\n', '').replace('\n', '')
def load(self, url, get={}, post={}, referer=True, cookies=True, just_header=False, multipart=False, decode=False):
    """ load and returns a given page """
    self.setRequestContext(url, get, post, referer, cookies, multipart)

    self.header = ""
    self.c.setopt(pycurl.HTTPHEADER, self.headers)

    if just_header:
        self.c.setopt(pycurl.FOLLOWLOCATION, 0)
        self.c.setopt(pycurl.NOBODY, 1)
        self.c.perform()
        rep = self.header

        self.c.setopt(pycurl.FOLLOWLOCATION, 1)
        self.c.setopt(pycurl.NOBODY, 0)
    else:
        self.c.perform()
        rep = self.getResponse()

    self.c.setopt(pycurl.POSTFIELDS, "")
    self.lastEffectiveURL = self.c.getinfo(pycurl.EFFECTIVE_URL)
    self.code = self.verifyHeader()

    self.addCookies()

    if decode:
        rep = self.decodeResponse(rep)

    return rep
def handle_request(self):
    curl_handle = pycurl.Curl()

    # set default options.
    curl_handle.setopt(pycurl.URL, self.request_url)
    curl_handle.setopt(pycurl.REFERER, self.request_url)
    curl_handle.setopt(pycurl.USERAGENT, self.useragent)
    curl_handle.setopt(pycurl.TIMEOUT, self.curlopts['TIMEOUT'])
    curl_handle.setopt(pycurl.CONNECTTIMEOUT, self.curlopts['CONNECTTIMEOUT'])
    curl_handle.setopt(pycurl.HEADER, True)
    #curl_handle.setopt(pycurl.VERBOSE, 1)
    curl_handle.setopt(pycurl.FOLLOWLOCATION, 1)
    curl_handle.setopt(pycurl.MAXREDIRS, 5)

    if(self.request_headers and len(self.request_headers) > 0):
        tmplist = list()
        for(key, value) in self.request_headers.items():
            tmplist.append(key + ':' + value)
        curl_handle.setopt(pycurl.HTTPHEADER, tmplist)

    #??????POST
    curl_handle.setopt(pycurl.HTTPPROXYTUNNEL, 1)
    curl_handle.setopt(pycurl.POSTFIELDS, self.request_body)

    response = StringIO.StringIO()
    curl_handle.setopt(pycurl.WRITEFUNCTION, response.write)

    try:
        curl_handle.perform()
    except pycurl.error as error:
        raise ChannelException(error, 5)

    self.response_code = curl_handle.getinfo(curl_handle.HTTP_CODE)
    header_size = curl_handle.getinfo(curl_handle.HEADER_SIZE)
    resp_str = response.getvalue()

    self.response_headers = resp_str[0:header_size]
    self.response_body = resp_str[header_size:]

    response.close()
    curl_handle.close()
def _login(self):
    try:
        c = pycurl.Curl()
        c.setopt(pycurl.CAINFO, certifi.where())
        c.setopt(pycurl.URL, self.url)
        b = StringIO.StringIO()
        c.setopt(pycurl.USERAGENT, "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)")
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.setopt(pycurl.MAXREDIRS, 5)
        c.setopt(pycurl.CUSTOMREQUEST, "POST")
        c.setopt(pycurl.POSTFIELDS, self.post_data)
        c.perform()
        if b.getvalue():
            logging.info('success login')  # For INFO level
            self.API_TOKEN = json.loads(b.getvalue())["access_token"]
            self.save_token()
        else:
            logging.warning('success fail,get null result')  # 2 For WARNING level
        logging.debug(self.API_TOKEN)
        b.close()
        c.close()
    except pycurl.E_HTTP_POST_ERROR:
        logging.error(str(pycurl.E_HTTP_POST_ERROR))
    except Exception as e:
        logging.error('please check your password or username')
        logging.error(e.message)  # 3 For ERROR level
        pass
def __init__(self):
    self.c = pycurl.Curl()
    self.c.setopt(pycurl.TIMEOUT, 5)
    self.c.setopt(pycurl.FOLLOWLOCATION, True)
def getKeyword(i):  # ??json
    try:
        time.sleep(1)
        headers = [
            'Host:fengchao.baidu.com',
            'User-Agent: %s' % getUA(),
            'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
            'Accept-Encoding: gzip, deflate',
            'Referer: http://fengchao.baidu.com/nirvana/main.html?userid=8048066',
            'Connection: keep-alive',
            'COOKIE:%s' % COOKIE,
        ]
        post = urllib.urlencode({
            'params': '{"entry":"kr_station","query":"%s","querytype":1,"pageNo":1,"pageSize":300}' % keyword_list[i],
            'path': 'jupiter/GET/kr/word',
            'token': TOKEN,
            'userid': USERID,
        })
        url = 'http://fengchao.baidu.com/nirvana/request.ajax?path=jupiter/GET/kr/word'
        c = pycurl.Curl()
        # c.setopt(pycurl.PROXY, getRandomAlbIp())
        c.setopt(pycurl.URL, url)
        c.setopt(pycurl.FOLLOWLOCATION, True)
        c.setopt(pycurl.MAXREDIRS, 5)
        c.setopt(pycurl.CONNECTTIMEOUT, 20)
        c.setopt(pycurl.TIMEOUT, 20)
        c.setopt(pycurl.ENCODING, 'gzip,deflate')
        c.fp = StringIO.StringIO()
        c.setopt(pycurl.HTTPHEADER, headers)
        c.setopt(pycurl.POST, 1)
        c.setopt(pycurl.POSTFIELDS, post)
        c.setopt(c.WRITEFUNCTION, c.fp.write)
        c.perform()
        # mutex.acquire()  # ??
        jsonData = c.fp.getvalue()
        analyseJsonData(i, jsonData)
        # mutex.release()  # ??
    except Exception, e:
        print e
        pass
def curlRequest(self, url, headers=False, post=False, returnHeaders=True):
    ch = pycurl.Curl()
    ch.setopt(pycurl.URL, url)

    hdrs = [
        "Host: poloniex.com",
        "Connection: close",
        "User-Agent: Mozilla/5.0 (CLI; Linux x86_64) polproxy",
        "accept: application/json"
    ]

    if post != False:
        ch.setopt(pycurl.POSTFIELDS, post)
        hdrs = hdrs + ["content-type: application/x-www-form-urlencoded",
                       "content-length: " + str(len(post))]
    if headers != False:
        hdrs = hdrs + headers

    ch.setopt(pycurl.HTTPHEADER, hdrs)
    ch.setopt(pycurl.SSL_VERIFYHOST, 0)
    ch.setopt(pycurl.FOLLOWLOCATION, True)
    ch.setopt(pycurl.CONNECTTIMEOUT, 5)
    ch.setopt(pycurl.TIMEOUT, 5)

    ret = BytesIO()
    if returnHeaders:
        ch.setopt(pycurl.HEADERFUNCTION, ret.write)
    ch.setopt(pycurl.WRITEFUNCTION, ret.write)

    try:
        ch.perform()
    except:
        return ""
    ch.close()

    return ret.getvalue().decode("ISO-8859-1")
def init_handle(self):
    """
    Sets common options to curl handle.
    """
    self.setopt(pycurl.FOLLOWLOCATION, 1)
    self.setopt(pycurl.MAXREDIRS, 5)
    self.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.setopt(pycurl.NOSIGNAL, 1)
    self.setopt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        self.setopt(pycurl.AUTOREFERER, 1)
    self.setopt(pycurl.SSL_VERIFYPEER, 0)

    # Interval for low speed, detects connection loss, but can abort dl if
    # hoster stalls the download
    self.setopt(pycurl.LOW_SPEED_TIME, 45)
    self.setopt(pycurl.LOW_SPEED_LIMIT, 5)

    # do not save the cookies
    self.setopt(pycurl.COOKIEFILE, '')
    self.setopt(pycurl.COOKIEJAR, '')

    # self.setopt(pycurl.VERBOSE, 1)

    self.setopt(
        pycurl.USERAGENT,
        'Mozilla/5.0 (Windows NT 10.0; Win64; rv:53.0) '
        'Gecko/20100101 Firefox/53.0')
    if pycurl.version_info()[7]:
        self.setopt(pycurl.ENCODING, 'gzip,deflate')

    self.headers.update(
        {'Accept': "*/*",
         'Accept-Language': "en-US,en",
         'Accept-Charset': "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
         'Connection': "keep-alive",
         'Keep-Alive': "300",
         'Expect': ""})
def hosts():
    a = StringIO.StringIO()

    # Options for PyCurl
    opts = ['X-HoneyDb-ApiId: ' + DB_API_ID, 'X-HoneyDb-ApiKey: ' + DB_API_KEY]
    c.setopt(pycurl.HTTPHEADER, (opts))
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.URL, "https://riskdiscovery.com/honeydb/api/bad-hosts")
    c.setopt(c.WRITEDATA, a)
    try:
        c.perform()
    except Exception as e:
        print "\n[" + t.red("!") + "]Critical. An error was raised with the following message"
        print e

    os.system("clear")
    print "\n\n[" + t.green("+") + "]Retrieved Bad Hosts, formatting..."
    time.sleep(1)

    response_h = json.loads(a.getvalue())
    pprint(response_h)
    format = json.dumps(response_h, indent=2)
    with open('hosts.log', 'ab') as outfile:
        outfile.write(format)
        outfile.close()
    print "\n\nResults saved to 'hosts.log' in the current directory"
def feed():
    b = StringIO.StringIO()

    # Options for PyCurl
    opts = ['X-HoneyDb-ApiId: ' + DB_API_ID, 'X-HoneyDb-ApiKey: ' + DB_API_KEY]
    c.setopt(pycurl.HTTPHEADER, (opts))
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.URL, "https://riskdiscovery.com/honeydb/api/twitter-threat-feed")
    c.setopt(c.WRITEDATA, b)
    try:
        c.perform()
    except Exception as e:
        print "\n[" + t.red("!") + "]Critical. An error was raised with the following message"
        print e

    os.system("clear")
    print "\n\n[" + t.green("+") + "]Retrieved Threat Feed, formatting..."
    time.sleep(1)

    response_f = json.loads(b.getvalue())
    pprint(response_f)
    format = json.dumps(response_f, indent=2)
    with open('feed.log', 'ab') as outfile:
        outfile.write(format)
        outfile.close()
    print "\n\nResults saved to 'feed.log' in the current directory"
def _check_version(self):
    try:
        c = pycurl.Curl()
        c.setopt(pycurl.SSL_VERIFYPEER, 0)
        c.setopt(pycurl.SSL_VERIFYHOST, 0)
        c.setopt(pycurl.FOLLOWLOCATION, 1)

        data = getJsonFromURL(self.VERSION_URL, handle=c, max_attempts=3)
        if data:
            msgr.send_object(VersionInfo(data))
    # except pycurl.error as e:
    #     pass
    except Exception as e:
        logger.error('Failed checking for version updates. Unexpected error: {}'.format(e))
def check_mzURL(mz_server, file_name):
    '''Checks if an mzURL actually exists.

    mz_server should be the base URL of the server
    file_name is the name of the specific file (without its extension)
    '''
    if mz_server[-1] == '/':
        mz_server = mz_server[:-1]

    # Handle to libcurl object
    crl = pycurl.Curl()

    # set some general options
    crl.setopt(pycurl.FOLLOWLOCATION, True)
    crl.setopt(pycurl.URL, str(mz_server + '/files.txt'))

    output = cStringIO.StringIO()
    crl.setopt(pycurl.WRITEFUNCTION, output.write)

    try:
        for i in range(5):
            #print 'check mzurl %d' % i
            crl.perform()
            if output.getvalue():
                break
    except pycurl.error, e:
        return False

    for f in output.getvalue().splitlines():
        if os.path.splitext(f)[0].lower() == file_name.lower():
            return True
    else:
        return False
def __init__(self, data_file, verbose=False, **kwargs):
    self.file_type = 'mzurl'
    # strip off the final slash, if it exists
    if data_file[-1] == '/':
        data_file = data_file[:-1]
    # Likewise, html or other madness.
    if any([data_file.lower().endswith(x) for x in ['html', 'raw', 'wiff']]):
        data_file = ".".join(data_file.split(".")[:-1])
    self.data_file = data_file  # actually a URL to a file
    self.verbose = verbose

    self._scans = None  # cache of scan_info results for the whole file

    # A string with the name and path of an appropriate temp file
    # (varies by platform)
    fd, self.cookie_file_name = tempfile.mkstemp(text=True)
    os.close(fd)

    # Handle to libcurl object
    self.crl = pycurl.Curl()

    # set some general options
    self.crl.setopt(pycurl.COOKIEFILE, self.cookie_file_name)
    self.crl.setopt(pycurl.COOKIEJAR, self.cookie_file_name)
    self.crl.setopt(pycurl.FOLLOWLOCATION, True)
    self.crl.setopt(pycurl.VERBOSE, verbose)

    self.output = cStringIO.StringIO()
    self.crl.setopt(pycurl.WRITEFUNCTION, self.output.write)

    # how would you store an info file?
    #if os.path.exists(data_file + '.mzi'):
    #    self._info_file = data_file + '.mzi'
    #    info_fh = open(self._info_file)
    #    self._info_scans = cPickle.load(info_fh)
    #    info_fh.close()
    #else:
    #    self._info_file = None
def getToken(self):
    user_auth = '{"username": "%s","password": "%s"}' % (self.USERNAME, self.PASSWORD)
    b = StringIO.StringIO()
    c = pycurl.Curl()
    c.setopt(pycurl.URL, "http://api.zoomeye.org/user/login")
    c.setopt(pycurl.WRITEFUNCTION, b.write)
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.CUSTOMREQUEST, "POST")
    c.setopt(pycurl.POSTFIELDS, user_auth)
    c.perform()
    ReturnData = json.loads(b.getvalue())
    self.API_TOKEN = ReturnData['access_token']
    b.close()
    c.close()
def curl_common_init(buf):
    handle = pycurl.Curl()

    handle.setopt(pycurl.WRITEDATA, buf)
    handle.setopt(pycurl.HEADERFUNCTION, curl_hdr)
    handle.setopt(pycurl.DEBUGFUNCTION, curl_debug)

    handle.setopt(pycurl.USERPWD, '{}:{}'.format(_g.conf._user, _g.conf._pass))

    handle.setopt(pycurl.FOLLOWLOCATION, True)

    # avoid FTP CWD for fastest directory transversal
    handle.setopt(pycurl.FTP_FILEMETHOD, pycurl.FTPMETHOD_NOCWD)

    # we always set this flag and let the logging module
    # handle filtering.
    handle.setopt(pycurl.VERBOSE, True)

    # use ipv4 for VPNs
    handle.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
    handle.setopt(pycurl.USE_SSL, True)
    handle.setopt(pycurl.SSL_VERIFYPEER, False)
    # XXX
    handle.setopt(pycurl.SSL_VERIFYHOST, 0)

    return handle
def update_quotes():
    url = "http://finance.yahoo.com/d/quotes.csv?s={}&f=sc1p2oghl1".format(
        "+".join(stocks.keys())
    )
    buf = cStringIO.StringIO()

    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, buf.write)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.perform()

    data = buf.getvalue()
    data = data.replace('"', "")
    quote_lines = data.split("\n")

    new_quotes = []
    for line in quote_lines:
        fields = line.split(",")
        if len(fields) == 7:
            new_quotes.append({
                'symbol': fields[0],
                'change': fields[1],
                'pctchange': fields[2],
                'open': fields[3],
                'low': fields[4],
                'high': fields[5],
                'last': fields[6]
            })
    new_quotes.sort(key=lambda x: quotes_order(x))

    if len(quotes) == DATA_POINTS:
        quotes.pop(DATA_POINTS-1)
    quotes.insert(0, new_quotes)
def __init__(self, handle=None, options=None):
    self.setCurlHandle(handle)
    if options == None:
        options = Options()
    self.setOptions(options)

    # To start with, disable FOLLOWLOCATION since we'll handle it
    self._handle.setopt(pycurl.FOLLOWLOCATION, False)

    # Force IPv4, since this class isn't yet compatible with IPv6
    self._handle.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
def __init__(self, base_url="", fakeheaders=[]):
    self.handle = pycurl.Curl()
    # These members might be set.
    self.set_url(base_url)
    self.verbosity = 0
    self.fakeheaders = fakeheaders
    # Nothing past here should be modified by the caller.
    self.payload = None
    self.payload_io = BytesIO()
    self.hdr = ""
    # Verify that we've got the right site; harmless on a non-SSL connect.
    self.set_option(pycurl.SSL_VERIFYHOST, 2)
    # Follow redirects in case it wants to take us to a CGI...
    self.set_option(pycurl.FOLLOWLOCATION, 1)
    self.set_option(pycurl.MAXREDIRS, 5)
    self.set_option(pycurl.NOSIGNAL, 1)
    # Setting this option with even a nonexistent file makes libcurl
    # handle cookie capture and playback automatically.
    self.set_option(pycurl.COOKIEFILE, "/dev/null")
    # Set timeouts to avoid hanging too long
    self.set_timeout(30)
    # Use password identification from .netrc automatically
    self.set_option(pycurl.NETRC, 1)
    self.set_option(pycurl.WRITEFUNCTION, self.payload_io.write)

    def header_callback(x):
        self.hdr += x.decode('ascii')

    self.set_option(pycurl.HEADERFUNCTION, header_callback)
def to_pycurl_object(c, req):
    c.setopt(pycurl.MAXREDIRS, 5)

    c.setopt(pycurl.WRITEFUNCTION, req.body_callback)
    c.setopt(pycurl.HEADERFUNCTION, req.header_callback)
    c.setopt(pycurl.NOSIGNAL, 1)
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.SSL_VERIFYHOST, 0)

    c.setopt(pycurl.URL, req.completeUrl)

    if req.getConnTimeout():
        c.setopt(pycurl.CONNECTTIMEOUT, req.getConnTimeout())

    if req.getTotalTimeout():
        c.setopt(pycurl.TIMEOUT, req.getTotalTimeout())

    authMethod, userpass = req.getAuth()
    if authMethod or userpass:
        if authMethod == "basic":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
        elif authMethod == "ntlm":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
        elif authMethod == "digest":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
        c.setopt(pycurl.USERPWD, userpass)

    c.setopt(pycurl.HTTPHEADER, req.getHeaders())

    if req.method == "POST":
        c.setopt(pycurl.POSTFIELDS, req.postdata)

    if req.method != "GET" and req.method != "POST":
        c.setopt(pycurl.CUSTOMREQUEST, req.method)

    if req.method == "HEAD":
        c.setopt(pycurl.NOBODY, True)

    if req.followLocation:
        c.setopt(pycurl.FOLLOWLOCATION, 1)

    proxy = req.getProxy()
    if proxy != None:
        c.setopt(pycurl.PROXY, proxy)
        if req.proxytype == "SOCKS5":
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
        elif req.proxytype == "SOCKS4":
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
        req.delHeader("Proxy-Connection")

    return c
def getOneGameODD(game):
    resultStr = ''
    try:
        oddURL = 'http://27.45.161.37:8072/phone/1x2.aspx?ID=' + str(game.soccerID) + '&an=iosQiuTan&av=5.9&from=2&lang=0&subversion=1'
        # print oddURL
    except:
        pass

    c = pycurl.Curl()
    c.setopt(pycurl.URL, oddURL)
    b = StringIO.StringIO()
    c.setopt(pycurl.WRITEFUNCTION, b.write)
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.MAXREDIRS, 5)
    c.perform()
    resultStr = b.getvalue().decode('utf8')

    if resultStr != '':
        array = resultStr.split('!')
        companys = []
        for unit in array:
            # print unit.decode('utf-8')
            company = BetCompany()
            company.league = game.leauge
            company.result = game.soccer
            company.homeSoccer = game.allHome
            company.friendSoccer = game.allFriend
            company.soccerGameId = game.soccerID
            unitArray = unit.split('^')
            try:
                company.companyTitle = unitArray[0].encode('utf-8')
                company.orignal_winOdd = float(unitArray[2])
                company.orignal_drawOdd = float(unitArray[3])
                company.orignal_loseOdd = float(unitArray[4])
                company.winOdd = float(unitArray[5])
                company.drawOdd = float(unitArray[6])
                company.loseOdd = float(unitArray[7])
            except IndexError as e:
                print e
                print unitArray
            if company.companyTitle in ['????', '10BET', 'bet 365', 'bwin', 'Interwetten', 'SB', '??', '??', '????', '????', '??', 'Oddset', 'SNAI', 'ManbetX']:
                companys.append(company)
            if company.companyTitle == '??':
                game.orignal_aomenOdd = (company.orignal_winOdd, company.orignal_drawOdd, company.orignal_loseOdd)
                game.now_aomenOdd = (company.winOdd, company.drawOdd, company.loseOdd)
        return companys
    else:
        return []
def load(self, url, get={}, post={}, referer=True, cookies=True, just_header=False, multipart=False, decode=False):
    """
    Load and returns a given page.
    """
    self.set_request_context(url, get, post, referer, cookies, multipart)

    # TODO: use http/rfc message instead
    self.header = ""

    if "header" in self.options:
        # TODO
        # print("custom header not implemented")
        self.setopt(pycurl.HTTPHEADER, self.options['header'])

    if just_header:
        self.setopt(pycurl.FOLLOWLOCATION, 0)
        self.setopt(pycurl.NOBODY, 1)  # TODO: nobody= no post?

        # overwrite HEAD request, we want a common request type
        if post:
            self.setopt(pycurl.CUSTOMREQUEST, 'POST')
        else:
            self.setopt(pycurl.CUSTOMREQUEST, 'GET')

        try:
            self.c.perform()
            rep = self.header
        finally:
            self.setopt(pycurl.FOLLOWLOCATION, 1)
            self.setopt(pycurl.NOBODY, 0)
            self.unsetopt(pycurl.CUSTOMREQUEST)
    else:
        self.c.perform()
        rep = self.get_response()

    self.setopt(pycurl.POSTFIELDS, '')
    self.last_url = safequote(url)
    self.last_effective_url = self.c.getinfo(pycurl.EFFECTIVE_URL)

    if self.last_effective_url:
        self.last_url = self.last_effective_url
    self.code = self.verify_header()

    if cookies:
        self.parse_cookies()

    if decode:
        rep = self.decode_response(rep)

    return rep
def now_playing_update_thread(self, playlist_id, playlist_end, media_id, media_end, show_name):
    if not obplayer.Config.setting('sync_url'):
        return

    postfields = {}
    postfields['id'] = obplayer.Config.setting('sync_device_id')
    postfields['pw'] = obplayer.Config.setting('sync_device_password')
    postfields['playlist_id'] = playlist_id
    postfields['media_id'] = media_id
    postfields['show_name'] = show_name

    if playlist_end != '':
        postfields['playlist_end'] = int(round(playlist_end))
    else:
        postfields['playlist_end'] = ''

    if media_end != '':
        postfields['media_end'] = int(round(media_end))
    else:
        postfields['media_end'] = ''

    curl = pycurl.Curl()
    enc_postfields = urllib.urlencode(postfields)

    curl.setopt(pycurl.NOSIGNAL, 1)
    curl.setopt(pycurl.USERAGENT, 'OpenBroadcaster Player')
    curl.setopt(pycurl.URL, obplayer.Config.setting('sync_url') + '?action=now_playing')
    curl.setopt(pycurl.HEADER, False)
    curl.setopt(pycurl.POST, True)
    curl.setopt(pycurl.POSTFIELDS, enc_postfields)
    #curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.LOW_SPEED_LIMIT, 10)
    curl.setopt(pycurl.LOW_SPEED_TIME, 60)
    curl.setopt(pycurl.NOPROGRESS, 0)
    curl.setopt(pycurl.PROGRESSFUNCTION, self.curl_progress)

    try:
        curl.perform()
    except:
        obplayer.Log.log("exception in NowPlayingUpdate thread", 'error')
        obplayer.Log.log(traceback.format_exc(), 'error')

    curl.close()

#
# Request sync data from web application.
# This is used by sync (with request_type='schedule') and sync_priority_broadcasts (with request_type='emerg').
# Function outputs XML response from server.
#