The following 30 code examples, extracted from open-source Python projects, illustrate how to use pycurl.USERAGENT.
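Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the basic pattern: pycurl.USERAGENT sets the User-Agent request header on a curl handle. The function name, URL, and User-Agent string are placeholders.

import pycurl
from io import BytesIO

def fetch_with_user_agent(url, user_agent):
    """Minimal sketch: perform a GET with a custom User-Agent via pycurl.USERAGENT."""
    buffer = BytesIO()
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.USERAGENT, user_agent)   # sets the User-Agent request header
    curl.setopt(pycurl.FOLLOWLOCATION, True)
    curl.setopt(pycurl.WRITEFUNCTION, buffer.write)
    curl.perform()
    curl.close()
    return buffer.getvalue()

# Example usage (placeholder URL and UA string):
# body = fetch_with_user_agent("http://example.com", "MyCrawler/1.0")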
def request(self, endpoint, post=None):
    buffer = BytesIO()
    ch = pycurl.Curl()
    ch.setopt(pycurl.URL, Constants.API_URL + endpoint)
    ch.setopt(pycurl.USERAGENT, self.userAgent)
    ch.setopt(pycurl.WRITEFUNCTION, buffer.write)
    ch.setopt(pycurl.FOLLOWLOCATION, True)
    ch.setopt(pycurl.HEADER, True)
    ch.setopt(pycurl.VERBOSE, False)
    ch.setopt(pycurl.COOKIEFILE, os.path.join(self.IGDataPath, self.username, self.username + "-cookies.dat"))
    ch.setopt(pycurl.COOKIEJAR, os.path.join(self.IGDataPath, self.username, self.username + "-cookies.dat"))

    if post is not None:
        ch.setopt(pycurl.POST, True)
        ch.setopt(pycurl.POSTFIELDS, post)

    if self.proxy:
        ch.setopt(pycurl.PROXY, self.proxyHost)
        if self.proxyAuth:
            ch.setopt(pycurl.PROXYUSERPWD, self.proxyAuth)

    ch.perform()
    resp = buffer.getvalue()
    header_len = ch.getinfo(pycurl.HEADER_SIZE)
    header = resp[0:header_len]
    body = resp[header_len:]
    ch.close()

    if self.debug:
        print("REQUEST: " + endpoint)
        if post is not None:
            if not isinstance(post, list):
                print("DATA: " + str(post))
        print("RESPONSE: " + body)

    return [header, json_decode(body)]
def initHandle(self):
    """ sets common options to curl handle """
    self.c.setopt(pycurl.FOLLOWLOCATION, 1)
    self.c.setopt(pycurl.MAXREDIRS, 5)
    self.c.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.c.setopt(pycurl.NOSIGNAL, 1)
    self.c.setopt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        self.c.setopt(pycurl.AUTOREFERER, 1)
    self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
    self.c.setopt(pycurl.LOW_SPEED_TIME, 30)
    self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    #self.c.setopt(pycurl.VERBOSE, 1)

    self.c.setopt(pycurl.USERAGENT,
                  "Mozilla/5.0 (Windows NT 6.1; Win64; x64;en; rv:5.0) Gecko/20110619 Firefox/5.0")
    if pycurl.version_info()[7]:
        self.c.setopt(pycurl.ENCODING, "gzip, deflate")
    self.c.setopt(pycurl.HTTPHEADER, ["Accept: */*",
                                      "Accept-Language: en-US,en",
                                      "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
                                      "Connection: keep-alive",
                                      "Keep-Alive: 300",
                                      "Expect:"])
def __init__(self, p_request, p_timeoutMs=1000, p_curlOpts=None):
    if isinstance(p_request, str):
        p_request = HTTPRequest(p_url=p_request)
    self.m_request = p_request
    self.m_timeoutMs = p_timeoutMs
    self.m_response = None
    self.m_handle = None
    self.m_data = None
    self.m_headers = None
    self.m_handle = pycurl.Curl()
    self.m_opts = p_curlOpts
    if p_curlOpts is None:
        self.m_opts = {}
    self.cleanup()
    self._init_opt()
    self._init_url()
    self._init_method()
    self._init_headers()
    self.m_handle.setopt(pycurl.USERAGENT, self.m_request.m_agent)
    self.m_handle.setopt(pycurl.HEADERFUNCTION, self._read_header)
    if self.m_timeoutMs:
        self.m_handle.setopt(pycurl.TIMEOUT_MS, self.m_timeoutMs)
    self.m_handle.setopt(pycurl.FOLLOWLOCATION, True)
def _get_url(self, url):
    if self.API_TOKEN is None:
        logging.error('none token')  # 3 For ERROR level
        return
    result = None
    try:
        c = pycurl.Curl()
        c.setopt(pycurl.CAINFO, certifi.where())
        c.setopt(pycurl.URL, url)
        b = StringIO.StringIO()
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        c.setopt(pycurl.USERAGENT, "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)")
        c.setopt(pycurl.HTTPHEADER, ['Authorization: JWT %s' % self.API_TOKEN.encode()])
        c.setopt(pycurl.CUSTOMREQUEST, "GET")
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.perform()
        result = b.getvalue()
        logging.debug(result)
    except Exception as e:
        logging.error(e.message)
        logging.error('go error')
    return result
def curl(url, debug=False, **kwargs):
    while 1:
        try:
            s = StringIO.StringIO()
            c = pycurl.Curl()
            c.setopt(pycurl.URL, url)
            c.setopt(pycurl.REFERER, url)
            c.setopt(pycurl.FOLLOWLOCATION, True)
            c.setopt(pycurl.TIMEOUT, 60)
            c.setopt(pycurl.ENCODING, 'gzip')
            c.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36')
            c.setopt(pycurl.NOSIGNAL, True)
            c.setopt(pycurl.WRITEFUNCTION, s.write)
            for k, v in kwargs.iteritems():
                c.setopt(vars(pycurl)[k], v)
            c.perform()
            c.close()
            return s.getvalue()
        except:
            if debug:
                raise
            continue
def test_gzip(url):
    t = Test()
    c = pycurl.Curl()
    c.setopt(pycurl.WRITEFUNCTION, t.callback)
    c.setopt(pycurl.ENCODING, 'gzip')
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.USERAGENT, "User-Agent':'EMAO_OPS_MONITOR) Gecko/20091201 Firefox/3.5.6)")
    c.perform()
    TOTAL_TIME = c.getinfo(c.TOTAL_TIME)
    #print "total time: %.2f ms" % (TOTAL_TIME * 1000)
    return TOTAL_TIME * 1000
def download_preparing(self, pyfile):
    if not isinstance(pyfile.plugin.req, HTTPRequest):
        return

    connecttimeout = self.config.get('connecttimeout')
    maxredirs = self.config.get('maxredirs')
    useragent = self.config.get('useragent')

    if connecttimeout:
        pyfile.plugin.req.http.c.setopt(pycurl.CONNECTTIMEOUT, connecttimeout)

    if maxredirs:
        pyfile.plugin.req.http.c.setopt(pycurl.MAXREDIRS, maxredirs)

    if useragent:
        self.log_debug("Use custom user-agent string `%s`" % useragent)
        pyfile.plugin.req.http.c.setopt(pycurl.USERAGENT, encode(useragent))
def get_html(url, user_agent, refer_url):
    """
    curl html
    :param url:
    :param user_agent:
    :param refer_url:
    :return:
    """
    curl = pycurl.Curl()
    curl.setopt(pycurl.USERAGENT, user_agent)
    curl.setopt(pycurl.REFERER, refer_url)
    buffers = StringIO()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEDATA, buffers)
    curl.perform()
    body = buffers.getvalue()
    buffers.close()
    curl.close()
    return body
def get(url, user_agent=UA, referrer=None):
    """Make a GET request of the url using pycurl and return the data
    (which is None if unsuccessful)"""
    data = None
    databuffer = StringIO()

    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)
    curl.setopt(pycurl.TIMEOUT, 8)
    curl.setopt(pycurl.WRITEFUNCTION, databuffer.write)
    curl.setopt(pycurl.COOKIEFILE, '')
    if user_agent:
        curl.setopt(pycurl.USERAGENT, user_agent)
    if referrer is not None:
        curl.setopt(pycurl.REFERER, referrer)
    try:
        curl.perform()
        data = databuffer.getvalue()
    except Exception:
        pass
    curl.close()

    return data
def request(self, endpoint, headers=None, post=None, first=True):
    buffer = BytesIO()
    ch = pycurl.Curl()
    ch.setopt(pycurl.URL, endpoint)
    ch.setopt(pycurl.USERAGENT, self.userAgent)
    ch.setopt(pycurl.WRITEFUNCTION, buffer.write)
    ch.setopt(pycurl.FOLLOWLOCATION, True)
    ch.setopt(pycurl.HEADER, True)
    if headers:
        ch.setopt(pycurl.HTTPHEADER, headers)
    ch.setopt(pycurl.VERBOSE, self.debug)
    ch.setopt(pycurl.SSL_VERIFYPEER, False)
    ch.setopt(pycurl.SSL_VERIFYHOST, False)
    ch.setopt(pycurl.COOKIEFILE, self.settingsPath + self.username + '-cookies.dat')
    ch.setopt(pycurl.COOKIEJAR, self.settingsPath + self.username + '-cookies.dat')

    if post:
        import urllib
        ch.setopt(pycurl.POST, len(post))
        ch.setopt(pycurl.POSTFIELDS, urllib.urlencode(post))

    ch.perform()
    resp = buffer.getvalue()
    header_len = ch.getinfo(pycurl.HEADER_SIZE)
    header = resp[0:header_len]
    body = resp[header_len:]
    ch.close()

    if self.debug:
        import urllib
        print("REQUEST: " + endpoint)
        if post is not None:
            if not isinstance(post, list):
                print('DATA: ' + urllib.unquote_plus(json.dumps(post)))
        print("RESPONSE: " + body + "\n")

    return [header, json_decode(body)]
def get_download_link(fs_id):
    """
    Get the download link of a file.
    :param fs_id:
    :return:
    """
    curl = pycurl.Curl()
    curl.setopt(pycurl.USERAGENT, const.USER_AGENT)
    curl.setopt(pycurl.REFERER, const.PAN_REFER_URL)
    buffers = StringIO()
    request_dict = {
        'channel': 'chunlei',
        'timestamp': '1473685224',
        'fidlist': [fs_id],
        'type': 'dlink',
        'web': 1,
        'clienttype': 0,
        'bdstoken': 'e0e895bb3ef7b0cb70899ee66b74e809',
        'sign': decode_sign(parse_sign2('d76e889b6aafd3087ac3bd56f4d4053a',
                                        '3545d271c5d07ba27355d39da0c62a4ee06d2d25'))
    }
    target_url = const.PAN_API_URL + 'download?' + urllib.urlencode(request_dict)
    curl.setopt(pycurl.URL, target_url)
    curl.setopt(pycurl.WRITEDATA, buffers)
    curl.setopt(pycurl.COOKIEFILE, "cookie.txt")
    curl.perform()
    body = buffers.getvalue()
    buffers.close()
    curl.close()
    data = json.loads(body)
    if data['errno']:
        return None
    return data['dlink'][0]['dlink']
def test_pycurl_user_agent(self):
    """If provided, the user-agent is set in the request."""
    curl = CurlStub(b"result")
    result = fetch("http://example.com", curl=curl, user_agent="user-agent")
    self.assertEqual(result, b"result")
    self.assertEqual(b"user-agent", curl.options[pycurl.USERAGENT])
def api_response(self, action, get={}, post={}):
    get['action'] = action
    self.req.http.c.setopt(
        pycurl.USERAGENT,
        encode(self.config.get("useragent",
                               default="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:51.0) Gecko/20100101 Firefox/51.0",
                               plugin="UserAgentSwitcher")))
    json_data = self.load(self.API_URL, get=get, post=post)
    return json.loads(json_data)
def decrypt(self, pyfile):
    #: If we use curl as a user agent, we will get a straight redirect (no waiting!)
    self.req.http.c.setopt(pycurl.USERAGENT, "curl/7.42.1")

    #: Fetch the target URL
    header = self.load(self.pyfile.url, just_header=True, decode=False)
    target_url = header.get('location')
    self.links.append(target_url)
def prepare(self):
    #: Init
    self.fileid = re.match(self.__pattern__, self.pyfile.url).group('ID')

    set_cookie(self.req.cj, "linkcrypt.ws", "language", "en")

    #: Request package
    #: Better chance to not get those key-captchas
    self.req.http.c.setopt(
        pycurl.USERAGENT,
        "Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko")

    self.data = self.load(self.pyfile.url)
    self.data = self.load(self.pyfile.url)
def curl_get(self, url, refUrl=None):
    buf = cStringIO.StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    curl.setopt(curl.WRITEFUNCTION, buf.write)
    curl.setopt(pycurl.SSL_VERIFYPEER, 0)
    #curl.setopt(pycurl.SSL_VERIFYHOST, 0)
    #curl.setopt(pycurl.HEADERFUNCTION, self.headerCookie)
    curl.setopt(pycurl.VERBOSE, 0)
    curl.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:46.0) Gecko/20100101 Firefox/46.0')
    #curl.setopt(pycurl.HTTPGET, 1)
    #curl.setopt(pycurl.COOKIE, Cookie)
    #curl.setopt(pycurl.POSTFIELDS, 'j_username={ngnms_user}&j_password={ngnms_password}'.format(**self.ngnms_login))
    curl.setopt(pycurl.COOKIEJAR, '/htdocs/logs/py_cookie.txt')
    curl.setopt(pycurl.COOKIEFILE, '/htdocs/logs/py_cookie.txt')
    if refUrl:
        curl.setopt(pycurl.REFERER, refUrl)
    #curl.setopt(c.CONNECTTIMEOUT, 5)
    #curl.setopt(c.TIMEOUT, 8)
    curl.perform()
    backinfo = ''
    if curl.getinfo(pycurl.RESPONSE_CODE) == 200:
        backinfo = buf.getvalue()
    curl.close()
    return backinfo
def handle_request(self):
    curl_handle = pycurl.Curl()

    # set default options.
    curl_handle.setopt(pycurl.URL, self.request_url)
    curl_handle.setopt(pycurl.REFERER, self.request_url)
    curl_handle.setopt(pycurl.USERAGENT, self.useragent)
    curl_handle.setopt(pycurl.TIMEOUT, self.curlopts['TIMEOUT'])
    curl_handle.setopt(pycurl.CONNECTTIMEOUT, self.curlopts['CONNECTTIMEOUT'])
    curl_handle.setopt(pycurl.HEADER, True)
    #curl_handle.setopt(pycurl.VERBOSE, 1)
    curl_handle.setopt(pycurl.FOLLOWLOCATION, 1)
    curl_handle.setopt(pycurl.MAXREDIRS, 5)

    if self.request_headers and len(self.request_headers) > 0:
        tmplist = list()
        for (key, value) in self.request_headers.items():
            tmplist.append(key + ':' + value)
        curl_handle.setopt(pycurl.HTTPHEADER, tmplist)

    # send the request body as a POST through the proxy tunnel
    curl_handle.setopt(pycurl.HTTPPROXYTUNNEL, 1)
    curl_handle.setopt(pycurl.POSTFIELDS, self.request_body)

    response = StringIO.StringIO()
    curl_handle.setopt(pycurl.WRITEFUNCTION, response.write)

    try:
        curl_handle.perform()
    except pycurl.error as error:
        raise ChannelException(error, 5)

    self.response_code = curl_handle.getinfo(curl_handle.HTTP_CODE)
    header_size = curl_handle.getinfo(curl_handle.HEADER_SIZE)
    resp_str = response.getvalue()
    self.response_headers = resp_str[0:header_size]
    self.response_body = resp_str[header_size:]

    response.close()
    curl_handle.close()
def _login(self):
    try:
        c = pycurl.Curl()
        c.setopt(pycurl.CAINFO, certifi.where())
        c.setopt(pycurl.URL, self.url)
        b = StringIO.StringIO()
        c.setopt(pycurl.USERAGENT, "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)")
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        c.setopt(pycurl.MAXREDIRS, 5)
        c.setopt(pycurl.CUSTOMREQUEST, "POST")
        c.setopt(pycurl.POSTFIELDS, self.post_data)
        c.perform()
        if b.getvalue():
            logging.info('success login')  # For INFO level
            self.API_TOKEN = json.loads(b.getvalue())["access_token"]
            self.save_token()
        else:
            logging.warning('login failed, got an empty response')  # 2 For WARNING level
        logging.debug(self.API_TOKEN)
        b.close()
        c.close()
    except pycurl.error as e:
        # pycurl reports transfer problems (including HTTP POST errors) as pycurl.error
        logging.error(str(e))
    except Exception as e:
        logging.error('please check your password or username')
        logging.error(e.message)  # 3 For ERROR level
def init_handle(self):
    """
    Sets common options to curl handle.
    """
    self.setopt(pycurl.FOLLOWLOCATION, 1)
    self.setopt(pycurl.MAXREDIRS, 5)
    self.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.setopt(pycurl.NOSIGNAL, 1)
    self.setopt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        self.setopt(pycurl.AUTOREFERER, 1)
    self.setopt(pycurl.SSL_VERIFYPEER, 0)
    # Interval for low speed, detects connection loss, but can abort dl if
    # hoster stalls the download
    self.setopt(pycurl.LOW_SPEED_TIME, 45)
    self.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    # do not save the cookies
    self.setopt(pycurl.COOKIEFILE, '')
    self.setopt(pycurl.COOKIEJAR, '')
    # self.setopt(pycurl.VERBOSE, 1)

    self.setopt(
        pycurl.USERAGENT,
        'Mozilla/5.0 (Windows NT 10.0; Win64; rv:53.0) '
        'Gecko/20100101 Firefox/53.0')
    if pycurl.version_info()[7]:
        self.setopt(pycurl.ENCODING, 'gzip,deflate')

    self.headers.update(
        {'Accept': "*/*",
         'Accept-Language': "en-US,en",
         'Accept-Charset': "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
         'Connection': "keep-alive",
         'Keep-Alive': "300",
         'Expect': ""})
def getBytesFromURL(url, handle=None, max_attempts=1, connect_timeout=None, timeout=None, gzip=True):
    if not handle:
        handle = pycurl.Curl()
    url = quote(url, safe=':/?=')
    b = BytesIO()
    handle.setopt(handle.URL, url)
    if connect_timeout:
        handle.setopt(handle.CONNECTTIMEOUT, connect_timeout)
    if timeout:
        handle.setopt(handle.TIMEOUT, timeout)
    handle.setopt(pycurl.USERAGENT, "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36")
    # handle.setopt(handle.VERBOSE, 1)
    if gzip:
        handle.setopt(handle.ENCODING, 'gzip, deflate')
    handle.setopt(handle.WRITEFUNCTION, b.write)

    attempts = 0
    while attempts < max_attempts:
        if attempts > 0:
            time.sleep(2)
        handle.perform()
        if handle.getinfo(handle.RESPONSE_CODE) == 200:
            return b
        attempts += 1

    msgr.send_tmsg("HTTP Code: {} while trying to retrieve URL: {}"
                   .format(handle.getinfo(handle.RESPONSE_CODE), url), logging.WARN)
    return None
def list_dir(dir_name):
    """
    List the files under a directory and collect their download links.
    :param dir_name: directory name
    :return:
    """
    result = list()
    curl = pycurl.Curl()
    curl.setopt(pycurl.USERAGENT, const.USER_AGENT)
    curl.setopt(pycurl.REFERER, const.PAN_REFER_URL)
    buffers = StringIO()
    request_dict = {
        'channel': 'chunlei',
        'clienttype': 0,
        'showempty': 0,
        'web': 1,
        'order': 'time',
        'desc': 1,
        'page': 1,
        'num': 100,
        'dir': dir_name,
        'bdstoken': 'e0e895bb3ef7b0cb70899ee66b74e809'
    }
    target_url = const.PAN_API_URL + 'list?' + urllib.urlencode(request_dict)
    curl.setopt(pycurl.URL, target_url)
    curl.setopt(pycurl.WRITEDATA, buffers)
    curl.setopt(pycurl.COOKIEFILE, "cookie.txt")
    curl.perform()
    body = buffers.getvalue()
    print body
    buffers.close()
    curl.close()
    data = json.loads(body)
    if data['errno'] == 0:
        for a_list in data['list']:
            dlink = get_download_link(a_list['fs_id'])
            if dlink:
                dlink = dlink.replace('\\', '')
                result.append(dlink)
    return result
def get_dlinks(search_target, get_dlinks_only=True):
    """
    Crawl the picture links returned for a search keyword.
    :param search_target: search keyword
    :param get_dlinks_only: whether to only collect the links
    :return:
    """
    refer_url = const.REFER_URL % search_target
    curl = pycurl.Curl()
    curl.setopt(pycurl.USERAGENT, const.USER_AGENT)
    curl.setopt(pycurl.REFERER, refer_url)

    result = []
    ll = 0
    record_start_cursor = get_record_start_cursor(const.CURSOR_FILE)
    if record_start_cursor:
        ll = int(record_start_cursor)

    print('start')
    # keep paging until no more data is returned
    while True:
        print('crawler pictures of page %d' % (ll / 30 + 1))
        # buffer for the raw response
        buffers = StringIO()
        target_url = const.API_URL % (search_target, search_target, ll)
        curl.setopt(pycurl.URL, target_url)
        curl.setopt(pycurl.WRITEDATA, buffers)
        curl.perform()
        body = buffers.getvalue()
        body = body.replace('null', 'None')
        data = eval(body)
        if 'data' in data:
            has_data = False
            for a_data in data['data']:
                obj_url = None
                if 'objURL' in a_data:
                    obj_url = a_data['objURL']
                if obj_url:
                    has_data = True
                    result.append(obj_url)
            if not has_data:
                print('no more pic')
                break
            ll += 30
        else:
            print('no more pic')
            break

    print('done')
    curl.close()

    # record the paging cursor for the next run
    if ll:
        set_record_start_cursor(str(ll), const.CURSOR_FILE)

    for index, data in enumerate(result):
        result[index] = decode_url(data)

    if not get_dlinks_only:
        save_to_file(result, search_target + '.txt', const.BASE_FOLDER)
def version_update(self):
    if not obplayer.Config.setting('sync_url'):
        return

    obplayer.Log.log('sending player version to server: ' + obplayer.Config.version, 'sync')

    postfields = {}
    postfields['id'] = obplayer.Config.setting('sync_device_id')
    postfields['pw'] = obplayer.Config.setting('sync_device_password')
    postfields['version'] = obplayer.Config.version
    postfields['longitude'] = obplayer.Config.setting('location_longitude')
    postfields['latitude'] = obplayer.Config.setting('location_latitude')

    curl = pycurl.Curl()
    enc_postfields = urllib.urlencode(postfields)

    curl.setopt(pycurl.NOSIGNAL, 1)
    curl.setopt(pycurl.USERAGENT, 'OpenBroadcaster Player')
    curl.setopt(pycurl.URL, obplayer.Config.setting('sync_url') + '?action=version')
    curl.setopt(pycurl.HEADER, False)
    curl.setopt(pycurl.POST, True)
    curl.setopt(pycurl.POSTFIELDS, enc_postfields)
    curl.setopt(pycurl.LOW_SPEED_LIMIT, 10)
    curl.setopt(pycurl.LOW_SPEED_TIME, 60)
    curl.setopt(pycurl.NOPROGRESS, 0)
    curl.setopt(pycurl.PROGRESSFUNCTION, self.curl_progress)

    class CurlResponse:
        def __init__(self):
            self.buffer = u''
        def __call__(self, data):
            self.buffer += data.decode('utf-8')

    curl_response = CurlResponse()
    curl.setopt(pycurl.WRITEFUNCTION, curl_response)

    try:
        curl.perform()
    except:
        obplayer.Log.log("exception in VersionUpdate thread", 'error')
        obplayer.Log.log(traceback.format_exc(), 'error')
    curl.close()

    if curl_response.buffer:
        version = json.loads(curl_response.buffer)
        obplayer.Log.log("server version reported as " + str(version), 'sync')
        if not self.check_min_version(version):
            obplayer.Log.log("minimum server version " + str(MIN_SERVER_VERSION) + " is required. Please update server software before continuing", 'error')
    else:
        obplayer.Log.log("server did not report a version number", 'warning')
def now_playing_update_thread(self, playlist_id, playlist_end, media_id, media_end, show_name):
    if not obplayer.Config.setting('sync_url'):
        return

    postfields = {}
    postfields['id'] = obplayer.Config.setting('sync_device_id')
    postfields['pw'] = obplayer.Config.setting('sync_device_password')
    postfields['playlist_id'] = playlist_id
    postfields['media_id'] = media_id
    postfields['show_name'] = show_name

    if playlist_end != '':
        postfields['playlist_end'] = int(round(playlist_end))
    else:
        postfields['playlist_end'] = ''

    if media_end != '':
        postfields['media_end'] = int(round(media_end))
    else:
        postfields['media_end'] = ''

    curl = pycurl.Curl()
    enc_postfields = urllib.urlencode(postfields)

    curl.setopt(pycurl.NOSIGNAL, 1)
    curl.setopt(pycurl.USERAGENT, 'OpenBroadcaster Player')
    curl.setopt(pycurl.URL, obplayer.Config.setting('sync_url') + '?action=now_playing')
    curl.setopt(pycurl.HEADER, False)
    curl.setopt(pycurl.POST, True)
    curl.setopt(pycurl.POSTFIELDS, enc_postfields)
    #curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.LOW_SPEED_LIMIT, 10)
    curl.setopt(pycurl.LOW_SPEED_TIME, 60)
    curl.setopt(pycurl.NOPROGRESS, 0)
    curl.setopt(pycurl.PROGRESSFUNCTION, self.curl_progress)

    try:
        curl.perform()
    except:
        obplayer.Log.log("exception in NowPlayingUpdate thread", 'error')
        obplayer.Log.log(traceback.format_exc(), 'error')
    curl.close()

#
# Request sync data from web application.
# This is used by sync (with request_type='schedule') and sync_priority_broadcasts (with request_type='emerg').
# Function outputs XML response from server.
#
def sync_request(self, request_type='', data=False):
    sync_url = obplayer.Config.setting('sync_url')
    if not sync_url:
        obplayer.Log.log("sync url is blank, skipping sync request", 'sync')
        return ''

    curl = pycurl.Curl()

    postfields = {}
    postfields['id'] = obplayer.Config.setting('sync_device_id')
    postfields['pw'] = obplayer.Config.setting('sync_device_password')
    postfields['hbuffer'] = obplayer.Config.setting('sync_buffer')
    if data:
        postfields['data'] = data

    enc_postfields = urllib.urlencode(postfields)

    curl.setopt(pycurl.NOSIGNAL, 1)
    curl.setopt(pycurl.USERAGENT, 'OpenBroadcaster Player')
    curl.setopt(pycurl.URL, sync_url + '?action=' + request_type)
    curl.setopt(pycurl.HEADER, False)
    curl.setopt(pycurl.POST, True)
    curl.setopt(pycurl.POSTFIELDS, enc_postfields)

    # some options so that it'll abort the transfer if the speed is too low (i.e., network problem)
    # low speed abort set to 0.01Kbytes/s for 60 seconds
    curl.setopt(pycurl.LOW_SPEED_LIMIT, 10)
    curl.setopt(pycurl.LOW_SPEED_TIME, 60)
    curl.setopt(pycurl.NOPROGRESS, 0)
    curl.setopt(pycurl.PROGRESSFUNCTION, self.curl_progress)

    class CurlResponse:
        def __init__(self):
            self.buffer = u''
        def __call__(self, data):
            self.buffer += data.decode('utf-8')

    curl_response = CurlResponse()
    curl.setopt(pycurl.WRITEFUNCTION, curl_response)

    try:
        curl.perform()
    #except pycurl.error as error:
    #    (errno, errstr) = error
    #    obplayer.Log.log('network error: ' + errstr, 'error')
    except:
        obplayer.Log.log("exception in sync " + request_type + " thread", 'error')
        obplayer.Log.log(traceback.format_exc(), 'error')
    curl.close()

    return curl_response.buffer

#
# Fetch media from web application. Saves under media directory.
# media_id : id of the media we want
# filename : filename to save under.
#