Python urllib2 module: install_opener() example source code

From open-source Python projects, we extracted the following 48 code examples illustrating how to use urllib2.install_opener().
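
Before the project examples, a minimal sketch of the pattern they all share: install_opener() replaces the process-wide default opener, so every later urllib2.urlopen() call goes through the handlers you chose (the proxy address here is illustrative):

import urllib2

# any handler chain works; a proxy handler is used here for illustration
opener = urllib2.build_opener(urllib2.ProxyHandler({'http': 'http://127.0.0.1:8080'}))
urllib2.install_opener(opener)        # replaces the process-wide default opener
print urllib2.urlopen('http://example.com').read()   # now routed through the proxy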

Project: aci    Author: cpaggen    | project source | file source
def getAPICCookie(ip_addr, authheader, username, password):
    url = 'http://'+ip_addr+'/api/aaaLogin.xml'

    # create 'opener' (OpenerDirector instance);
    # 'handlers' is assumed to be a list of handler objects defined elsewhere in the module
    opener = urllib2.build_opener(*handlers)
    # Install the opener.
    # Now all calls to urllib2.urlopen use our opener.
    urllib2.install_opener(opener)

    http_header["Host"] = ip_addr  # http_header: module-level dict of request headers
    xml_string = "<aaaUser name='%s' pwd='%s'/>" % (username, password)
    req = urllib2.Request(url=url, data=xml_string, headers=http_header)

    try:
      response = urllib2.urlopen(req)
    except urllib2.URLError, e:
      print 'Failed to obtain auth cookie: %s' % (e)
      return 0
    else:
      rawcookie=response.info().getheaders('Set-Cookie')
      return rawcookie[0]
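
A note on the snippet above: install_opener() is process-global, so every later urllib2.urlopen() call keeps using these handlers. Installing a fresh default opener reverts it:

urllib2.install_opener(urllib2.build_opener())   # back to the default handler chain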
Project: Cortex-Analyzers    Author: CERT-BDF    | project source | file source
def get(self, url, proxy=None):
        if proxy:
            proxy = urllib2.ProxyHandler({'http': proxy})
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)

        try:
            response = urllib2.urlopen(url)
        except urllib2.HTTPError, e:
            resp = e.read()
            self.status_code = e.code
        except urllib2.URLError, e:
            # URLError carries no HTTP body or status code
            resp = str(e.reason)
            self.status_code = None
        else:
            self.status_code = response.code
            resp = response.read()

        return resp
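
Because the snippet installs the proxy opener globally, later urlopen() calls in the process inherit it too. A hedged alternative sketch, if per-request proxying is wanted instead (not how this project does it):

opener = urllib2.build_opener(urllib2.ProxyHandler({'http': proxy}))
resp = opener.open(url)   # only this request goes through the proxy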
Project: TACTIC-Handler    Author: listyque    | project source | file source
def download_from_url(url):
    proxy = env_server.get_proxy()
    if proxy['enabled']:
        server = proxy['server'].replace('http://', '')
        proxy_dict = {
            'http': 'http://{login}:{pass}@{0}'.format(server, **proxy)
        }
        proxy_handler = urllib2.ProxyHandler(proxy_dict)
        auth = urllib2.HTTPBasicAuthHandler()
        opener = urllib2.build_opener(proxy_handler, auth, urllib2.HTTPHandler)
        urllib2.install_opener(opener)

    run_thread = tc.ServerThread(env_inst.ui_main)
    run_thread.kwargs = dict(url=url, timeout=1)
    run_thread.routine = urllib2.urlopen
    run_thread.run()
    result_thread = tc.treat_result(run_thread, silent=True)
    if result_thread.isFailed():
        return False
    else:
        return result_thread.result
Project: darkc0de-old-stuff    Author: tuwid    | project source | file source
def ipcheck(proxy):
    try:
        pxhandle = urllib2.ProxyHandler({"http": proxy})
        opener = urllib2.build_opener(pxhandle)
        urllib2.install_opener(opener)
        myip = urllib2.urlopen('http://www.whatismyip.com/automation/n09230945.asp').read()
        xs = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', StripTags(myip))
        if not xs:
            pass
        elif xs[0] == myipadress or myipadress == myip:
            trans_list.append(proxy)
            print proxy[:-1], "\t- ALIVE -", timer(), "- TRANSPARENT"
        else:
            anon_list.append(proxy)
            print proxy[:-1], "\t- ALIVE -", timer(), "- EXT-iP :", xs[0]
    except KeyboardInterrupt:
        print "\n\nCTRL+C - check temporary proxylist file\n\n"
        sys.exit(0)
    except:
        pass
Project: collectd-couchbase    Author: signalfx    | project source | file source
def _api_call(url, opener):
    """
    Makes a REST call against the Couchbase API.
    Args:
    url (str): The URL to get, including endpoint
    Returns:
    list: The JSON response
    """
    try:
        urllib2.install_opener(opener)
        resp = urllib2.urlopen(url, timeout=http_timeout)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        collectd.error("Error making API call (%s) %s" % (e, url))
        return None
    try:
        return json.load(resp)
    except ValueError as e:
        collectd.error("Error parsing JSON for API call (%s) %s" % (e, url))
        return None
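
_api_call() expects the caller to supply the opener. A hedged sketch of building one for Couchbase's password-protected REST endpoints (host and credentials are illustrative):

password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, 'http://localhost:8091', 'Administrator', 'password')
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_mgr))
stats = _api_call('http://localhost:8091/pools/default', opener)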
Project: dart    Author: lmco    | project source | file source
def download_vcpython27(self):
        """
        Download vcpython27 since some Windows 7 boxes have it and some don't.
        :return: None
        """

        self._prepare_for_download()

        logger.info('Beginning download of vcpython27... this may take a few minutes...')

        with open(os.path.join(DOWNLOADS_DIR, 'vcpython27.msi'), 'wb') as f:

            if self.PROXY is not None:
                opener = urllib2.build_opener(
                    urllib2.HTTPHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.ProxyHandler({'http': self.PROXY, 'https': self.PROXY})
                )
                urllib2.install_opener(opener)

            f.write(urllib2.urlopen(self.VCPYTHON27_DOWNLOAD_URL, timeout=self.DOWNLOAD_TIMEOUT).read())

        logger.debug('Download of vcpython27 complete')
Project: dart    Author: lmco    | project source | file source
def download_python(self):
        """
        Download Python
        :return: None
        """

        self._prepare_for_download()

        logger.info('Beginning download of python')

        with open(os.path.join(DOWNLOADS_DIR, 'python-installer.msi'), 'wb') as f:

            if self.PROXY is not None:
                opener = urllib2.build_opener(
                    urllib2.HTTPHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.ProxyHandler({'http': self.PROXY, 'https': self.PROXY})
                )
                urllib2.install_opener(opener)

            f.write(urllib2.urlopen(self.PYTHON_DOWNLOAD_URL, timeout=self.DOWNLOAD_TIMEOUT).read())

        logger.debug('Download of python complete')
Project: mx    Author: graalvm    | project source | file source
def _install_socks_proxy_opener(proxytype, proxyaddr, proxyport=None):
    """ Install a socks proxy handler so that all urllib2 requests are routed through the socks proxy. """
    try:
        import socks
        from sockshandler import SocksiPyHandler
    except ImportError:
        warn('WARNING: Failed to load PySocks module. Try installing it with `pip install PySocks`.')
        return
    if proxytype == 4:
        proxytype = socks.SOCKS4
    elif proxytype == 5:
        proxytype = socks.SOCKS5
    else:
        abort("Unknown Socks Proxy type {0}".format(proxytype))

    opener = urllib2.build_opener(SocksiPyHandler(proxytype, proxyaddr, proxyport))
    urllib2.install_opener(opener)
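
A hedged usage sketch: once the SOCKS opener is installed, plain urlopen() calls are routed through the proxy (address and port are illustrative):

_install_socks_proxy_opener(5, '127.0.0.1', 1080)    # SOCKS5
print urllib2.urlopen('http://example.com').read()   # goes through the SOCKS proxy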
Project: aerospike-telemetry-agent    Author: aerospike    | project source | file source
def __init__(self, url, proxy, cafile):
        self.url = url
        self.proxy = proxy
        if proxy:
            logging.info("Using HTTPS proxy: " + proxy)
            proxy_handler = urllib2.ProxyHandler({'https': proxy})
            opener = urllib2.build_opener(proxy_handler)
            urllib2.install_opener(opener)
        self.kwargs = {}
        if cafile and hasattr(ssl, "create_default_context"):
            logging.info("Using CA file: " + cafile)
            ctx = ssl.create_default_context()
            ctx.load_verify_locations(cafile = cafile)
            self.kwargs['context'] = ctx

    # given an infoMap returned by the local node, call up the home server
Project: zoomdata-tools    Author: Zoomdata    | project source | file source
def __openrequest__(self, req):
    # Opens the passed in HTTP request
    if self.debug:
      print "\n----- REQUEST -----"
      handler = urllib2.HTTPSHandler(debuglevel=self.debugLevel)
      opener = urllib2.build_opener(handler)
      urllib2.install_opener(opener)
      print "- API ENDPOINT: "+req.get_full_url()
      print "- REQUEST METHOD: "+req.get_method()
      print "- AUTHORIZATION HEADER: "+req.get_header("Authorization")
      print "\n----- REQUEST DATA -----"
      print req.get_data()

    res = urllib2.urlopen(req)
    out = res.read()

    if self.debug:
      print "\n----- REQUEST INFO -----"
      print res.info()
      print "\n----- RESPONSE -----"
      print out

    return out
Project: DistributeCrawler    Author: SmallHedgehog    | project source | file source
def add_proxy(self, addr, proxy_type='all',
                  user=None, password=None):
        """Add proxy"""
        if proxy_type == 'all':
            self.proxies = {
                'http': addr,
                'https': addr,
                'ftp': addr
            }
        else:
            self.proxies[proxy_type] = addr
        proxy_handler = urllib2.ProxyHandler(self.proxies)
        self.__build_opener()
        self.opener.add_handler(proxy_handler)

        if user and password:
            pwd_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            pwd_manager.add_password(None, addr, user, password)
            proxy_auth_handler = urllib2.ProxyBasicAuthHandler(pwd_manager)
            self.opener.add_handler(proxy_auth_handler)

        urllib2.install_opener(self.opener)
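
A sketch of calling add_proxy() with an authenticated proxy; `spider` stands in for an instance of the class this method belongs to, and the address and credentials are made up:

spider.add_proxy('http://10.1.1.1:3128', proxy_type='http',
                 user='crawler', password='secret')
data = urllib2.urlopen('http://example.com').read()   # routed through the proxy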
Project: Belati    Author: aancw    | project source | file source
def check_single_proxy_status(self, proxy_address, domain_check):
        try:
            parse = urlparse(proxy_address)
            proxy_scheme = parse.scheme
            proxy = str(parse.hostname) + ':' + str(parse.port)
            proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy})
            opener = urllib2.build_opener(proxy_handler)
            opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36')]
            urllib2.install_opener(opener)
            req = urllib2.Request(domain_check)
            start_time = time.time()
            sock = urllib2.urlopen(req)
            end_time = time.time()
            diff_time = round(end_time - start_time, 3)
            log.console_log("{}[+] {} OK! Response Time : {}s{}".format(Y, proxy_address, diff_time, W))
            return 'ok'
        except urllib2.HTTPError, e:
            print('Error code: ' + str(e.code))
            return e.code
        except Exception, detail:
            print('ERROR ' +  str(detail))
            return 1
Project: catchup4kodi    Author: catchup4kodi    | project source | file source
def _update_opener(self):
        '''
        Builds and installs a new opener to be used by all future calls to 
        :func:`urllib2.urlopen`.
        '''
        if self._http_debug:
            http = urllib2.HTTPHandler(debuglevel=1)
        else:
            http = urllib2.HTTPHandler()

        if self._proxy:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
                                          urllib2.ProxyHandler({'http': 
                                                                self._proxy}), 
                                          urllib2.HTTPBasicAuthHandler(),
                                          http)

        else:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
                                          urllib2.HTTPBasicAuthHandler(),
                                          http)
        urllib2.install_opener(opener)
Project: pipgh    Author: ffunenga    | project source | file source
def authenticate(top_level_url=u'https://api.github.com'):
    try:
        if 'GH_AUTH_USER' not in os.environ:
            try:
                username = raw_input(u'Username: ')
            except NameError:
                username = input(u'Username: ')
        else:
            username = os.environ['GH_AUTH_USER']
        if 'GH_AUTH_PASS' not in os.environ:
            password = getpass.getpass(u'Password: ')
        else:
            password = os.environ['GH_AUTH_PASS']
    except KeyboardInterrupt:
        sys.exit(u'')
    try:
        import urllib.request as urllib_alias
    except ImportError:
        import urllib2 as urllib_alias
    password_mgr = urllib_alias.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, top_level_url, username, password)
    handler = urllib_alias.HTTPBasicAuthHandler(password_mgr)
    opener = urllib_alias.build_opener(handler)
    urllib_alias.install_opener(opener)
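
After authenticate() returns, the installed HTTPBasicAuthHandler answers the server's 401 challenge for any URL under the top-level URL; a minimal follow-up sketch:

import urllib2

authenticate()   # prompts, or reads GH_AUTH_USER / GH_AUTH_PASS
print urllib2.urlopen('https://api.github.com/user').read()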
Project: zhengfang-xuanke    Author: xiaohuanshu    | project source | file source
def open(aurl,post='',Referer=''):
    #proxy = 'http://127.0.0.1:8088'
    #opener = urllib2.build_opener( urllib2.ProxyHandler({'http':proxy}) )
    #urllib2.install_opener(opener)
    if post!='':
        test_data_urlencode = urllib.urlencode(post)
        req = urllib2.Request(url=aurl,data = test_data_urlencode)
    else:
        req = urllib2.Request(url=aurl)
    if Referer!='':
        req.add_header('Referer',Referer)
    if aspxsession!="":
        req.add_header('Cookie',aspxsession)
    res_data = urllib2.urlopen(req)
    return res_data
#????????session
Project: autoinjection    Author: ChengWiLL    | project source | file source
def error_handler(url):
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print "  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print "  EXCEPTION: %s" % e
            raise
        else:
            print "  status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", hosts
    keepalive_handler.close_all()
Project: autoinjection    Author: ChengWiLL    | project source | file source
def comp(N, url):
    print '  making %i connections to:\n  %s' % (N, url)

    sys.stdout.write('  first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print '  TIME: %.3f s' % t1

    sys.stdout.write('  now using the keepalive handler       ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print '  TIME: %.3f s' % t2
    print '  improvement factor: %.2f' % (t1/t2, )
Project: WeixinBot    Author: Urinx    | project source | file source
def set_cookie(cookie_file):
    """
    @brief      Load cookie from file
    @param      cookie_file
    @param      user_agent
    @return     cookie, LWPCookieJar
    """
    cookie = cookielib.LWPCookieJar(cookie_file)
    try:
        cookie.load(ignore_discard=True)
    except:
        Log.error(traceback.format_exc())
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    opener.addheaders = Constant.HTTP_HEADER_USERAGENT
    urllib2.install_opener(opener)
    return cookie
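
The counterpart operation, persisting cookies gathered during the session back to the same file, might look like this sketch:

cookie = set_cookie('cookies.txt')
# ... use urllib2 as usual, then write session cookies back to disk
cookie.save(ignore_discard=True)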
Project: free-rider-killer    Author: YukiSora    | project source | file source
def adminLogin(username, password):
    print '--- Initializing ---'
    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    urllib2.install_opener(opener)
    print '--- Getting Cookie ---'
    link = urllib2.urlopen('http://www.baidu.com/')
    print '--- Getting Token ---'
    # note: eval() on a network response is unsafe; json.loads() would be preferable
    token = eval(urllib2.urlopen('https://passport.baidu.com/v2/api/?getapi&tpl=pp&apiver=v3&class=login').read())['data']['token']
    print "Token: " + token
    print '--- Sign In Posting ---'
    postdata = {
        'token' : token,
        'tpl' : 'pp',
        'username' : username,
        'password' : password,
    }
    sendRequest('https://passport.baidu.com/v2/api/?login', postdata)
    link.close()
    return
Project: YouPBX    Author: JoneXiong    | project source | file source
def cache_resource(self, url):
        if self.proxy_url is not None:
            proxy = urllib2.ProxyHandler({'http': self.proxy_url})
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)
        request = urllib2.Request(url)
        user_agent = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.35 Safari/535.1'
        request.add_header('User-Agent', user_agent)
        handler = urllib2.urlopen(request, timeout=self.http_timeout)
        try:
            resource_type = MIME_TYPES[handler.headers.get('Content-Type')]
            if not resource_type:
                raise UnsupportedResourceFormat("Resource format not found")
        except KeyError:
            raise UnsupportedResourceFormat("Resource format not supported")
        etag = handler.headers.get('ETag')
        last_modified = handler.headers.get('Last-Modified')
        resource_key = self.get_resource_key(url)
        stream = handler.read()
        self.update_resource_params(resource_key, resource_type, etag, last_modified, stream)
        return stream, resource_type
Project: YouPBX    Author: JoneXiong    | project source | file source
def _urllib2_fetch(self, uri, params, method=None):
        # install error processor to handle HTTP 201 response correctly
        if self.opener is None:
            self.opener = urllib2.build_opener(HTTPErrorProcessor)
            urllib2.install_opener(self.opener)

        if method and method == 'GET':
            uri = self._build_get_uri(uri, params)
            req = PlivoUrlRequest(uri)
        else:
            req = PlivoUrlRequest(uri, urllib.urlencode(params))
            if method and (method == 'DELETE' or method == 'PUT'):
                req.http_method = method

        authstring = base64.encodestring('%s:%s' % (self.auth_id, self.auth_token))
        authstring = authstring.replace('\n', '')
        req.add_header("Authorization", "Basic %s" % authstring)

        response = urllib2.urlopen(req)
        return response.read()
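
HTTPErrorProcessor here is the project's own subclass; the comment suggests it exists because older urllib2 releases only passed 200/206 through, so a 201 Created would raise. A minimal hedged sketch of such a class:

class HTTPErrorProcessor(urllib2.HTTPErrorProcessor):
    def http_response(self, request, response):
        # accept the whole 2xx range as success instead of only 200/206
        if 200 <= response.code < 300:
            return response
        return urllib2.HTTPErrorProcessor.http_response(self, request, response)
    https_response = http_response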
Project: facebook-group-scrape    Author: mchirico    | project source | file source
def members(limit=2000):
    url='https://graph.facebook.com/v2.7/'+FACEBOOK_GROUP+'/members?fields=picture,name&limit=%s&access_token=%s' % (limit,TOKEN)
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)'
    values = {}
    headers = { 'Authorization':  BEAR}
    h = MyHTTPRedirectHandler()
    opener = urllib2.build_opener(h)
    urllib2.install_opener(opener)
    data = urllib.urlencode(values)
    json_data = ""
    try:
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        the_page = response.read()
        json_data = json.loads(the_page)
    except:
        print("Error reading data members")
    return json_data
Project: facebook-group-scrape    Author: mchirico    | project source | file source
def getJSONfromURL(url):
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)'
    values = {}
    headers = { 'Authorization':  BEAR}
    h = MyHTTPRedirectHandler()
    opener = urllib2.build_opener(h)
    urllib2.install_opener(opener)
    data = urllib.urlencode(values)
    json_data = ""
    try:
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        the_page = response.read()
        json_data = json.loads(the_page)
    except:
        print("Error reading data members")
    return json_data
Project: facebook-group-scrape    Author: mchirico    | project source | file source
def getJson(limit=13,since='2016-05-25',until='2016-05-26'):
    url='https://graph.facebook.com/v2.5/'+FACEBOOK_GROUP+'/feed?fields=reactions.limit(500){link,name,pic_square,type},message,name,id,created_time,permalink_url,shares,comments.limit(500){created_time,likes.limit(500),message,from,comments.limit(507){likes,message,from,created_time}},from&limit=%s&since=%s&until=%s&access_token=%s' % (limit,since,until,TOKEN)
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)'
    values = {}
    headers = { 'Authorization':  BEAR}
    h = MyHTTPRedirectHandler()
    opener = urllib2.build_opener(h)
    urllib2.install_opener(opener)
    data = urllib.urlencode(values)
    json_data = ""
    try:
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        the_page = response.read()
        json_data = json.loads(the_page)
    except:
        print("Error reading data")
    return json_data
Project: lightbulb-framework    Author: lightbulb-framework    | project source | file source
def __init__(self, configuration):
        self.setup(configuration)
        self.echo = None
        if "ECHO" in configuration:
            self.echo = configuration['ECHO']
        if self.proxy_scheme is not None and self.proxy_host is not None and \
                        self.proxy_port is not None:
            credentials = ""
            if self.proxy_username is not None and self.proxy_password is not None:
                credentials = self.proxy_username + ":" + self.proxy_password + "@"
            proxyDict = {
                self.proxy_scheme: self.proxy_scheme + "://" + credentials +
                                                    self.proxy_host + ":" + self.proxy_port
            }

            proxy = urllib2.ProxyHandler(proxyDict)

            if credentials != '':
                auth = urllib2.HTTPBasicAuthHandler()
                opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler)
            else:
                opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)
Project: Anki-Youdao    Author: megachweng    | project source | file source
def totalPage(self):

        self.loadedCookies = self.loadCookies()
        if not self.loadedCookies:
            return False
        # page index start from 0 end at max-1
        req = urllib2.Request('http://dict.youdao.com/wordbook/wordlist?p=0&tags=')
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.loadedCookies))
        urllib2.install_opener(opener)
        response = urllib2.urlopen(req)
        source = response.read()
        if '????' in source:
            return False
        else:
            try:
                return int(re.search('<a href="wordlist.p=(.*).tags=" class="next-page">????</a>', source, re.M | re.I).group(1)) - 1
            except Exception:
                return 1
Project: etunexus_api    Author: etusolution    | project source | file source
def _init_urllib(self, secure, debuglevel=0):
        cj = cookielib.CookieJar()
        no_proxy_support = urllib2.ProxyHandler({})
        cookie_handler = urllib2.HTTPCookieProcessor(cj)
        ctx = None
        if not secure:
            self._logger.info('[WARNING] Skip certificate verification.')
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
        https_handler = urllib2.HTTPSHandler(debuglevel=debuglevel, context=ctx)
        opener = urllib2.build_opener(no_proxy_support,
                                      cookie_handler,
                                      https_handler,
                                      MultipartPostHandler.MultipartPostHandler)
        opener.addheaders = [('User-agent', API_USER_AGENT)]
        urllib2.install_opener(opener)
Project: kekescan    Author: xiaoxiaoleo    | project source | file source
def verify(cls, args):
        cookie = cookielib.CookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
        urllib2.install_opener(opener)
        postdata = "_SESSION[login_in]=1&_SESSION[admin]=1&_SESSION[login_time]=300000000000000000000000\r\n"
        # get session
        request = urllib2.Request(args['options']['target'] + "/index.php", data=postdata)
        r = urllib2.urlopen(request)
        # login test
        request2 = urllib2.Request(args['options']['target'] + "/admin/admin.php", data=postdata)
        r = urllib2.urlopen(request2)
        content = r.read()
        if "admin_form.php?action=form_list&nav=list_order" in content:
            if "admin_main.php?nav=main" in content:
                args['success'] = True
                args['test_method'] = 'http://www.wooyun.org/bugs/wooyun-2014-059180'
                return args
        args['success'] = False
        return args
Project: zacui    Author: yoyopie    | project source | file source
def index(request):
    if request.method == "GET":
        try:
            ssl._create_default_https_context = ssl._create_unverified_context

            opener = wdf_urllib.build_opener(
                wdf_urllib.HTTPCookieProcessor(CookieJar()))
            wdf_urllib.install_opener(opener)
        except:
            pass
        uuid = getUUID()
        url = 'https://login.weixin.qq.com/qrcode/' + uuid
        params = {
            't': 'webwx',
            '_': int(time.time()),
        }

        request = getRequest(url=url, data=urlencode(params))
        response = wdf_urllib.urlopen(request)
        context = {
            'uuid': uuid,
            'response': response.read(),
            'delyou': '',
            }
        return render_to_response('index.html', context)
Project: TigerHost    Author: naphatkrit    | project source | file source
def setup_wsse_handler(base_url, username, password, preempt = True):
  """
  Configure urllib2 to try/use WSSE authentication, with a specific
  `username` and `password` when visiting any page that have a given
  `base_url`. Once this function has been called, all future requests
  through urllib2 should be able to handle WSSE authentication.
  """

  # Create a password manager
  passman = urllib2.HTTPPasswordMgrWithDefaultRealm()

  # Add username/password for domain defined by base_url
  passman.add_password(None, base_url, username, password)

  # Create the auth handler and install it in urllib2
  authhandler = WSSEAuthHandler(passman, preempt = preempt)
  opener = urllib2.build_opener(authhandler)
  urllib2.install_opener(opener)


# Example of how to use without handlers
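
A hedged usage sketch of the handler-based setup above (URL and credentials are placeholders):

setup_wsse_handler('https://api.example.com', 'alice', 's3cret')
print urllib2.urlopen('https://api.example.com/resource').read()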
Project: Eagle    Author: magerx    | project source | file source
def error_handler(url):
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print "  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print "  EXCEPTION: %s" % e
            raise
        else:
            print "  status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", ' '.join(hosts)
    keepalive_handler.close_all()
Project: Eagle    Author: magerx    | project source | file source
def comp(N, url):
    print '  making %i connections to:\n  %s' % (N, url)

    sys.stdout.write('  first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print '  TIME: %.3f s' % t1

    sys.stdout.write('  now using the keepalive handler       ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print '  TIME: %.3f s' % t2
    print '  improvement factor: %.2f' % (t1/t2, )
Project: Eagle    Author: magerx    | project source | file source
def send_common_request(url, is_post, cookie, para=''):
    """
    ?????WEB???????
    :url:       ??URL
    :is_post:   ???POST
    :cookie:    cookie
    """
    headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:42.0) Gecko/20100101 Firefox/42.0',
               'Cookie': cookie
               }
    # dns cache
    # socket.getaddrinfo = new_getaddrinfo

    try:
        encoding_support = ContentEncodingProcessor()
        opener = urllib2.build_opener(encoding_support, urllib2.HTTPHandler)
        urllib2.install_opener(opener)
        if is_post == 2:  # post
            # url, query = url.split('?', 1)
            return urllib2.urlopen(urllib2.Request(url, para, headers=headers)).read()
        else:
            return urllib2.urlopen(urllib2.Request('?'.join([url, para]), headers=headers)).read()
    except:
        return ''
Project: Helix    Author: 3lackrush    | project source | file source
def error_handler(url):
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print "  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print "  EXCEPTION: %s" % e
            raise
        else:
            print "  status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", hosts
    keepalive_handler.close_all()
Project: Helix    Author: 3lackrush    | project source | file source
def comp(N, url):
    print '  making %i connections to:\n  %s' % (N, url)

    sys.stdout.write('  first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print '  TIME: %.3f s' % t1

    sys.stdout.write('  now using the keepalive handler       ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print '  TIME: %.3f s' % t2
    print '  improvement factor: %.2f' % (t1/t2, )
Project: autoscan    Author: b01u    | project source | file source
def error_handler(url):
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print "  fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            foo = fo.read()
            fo.close()
            try: status, reason = fo.status, fo.reason
            except AttributeError: status, reason = None, None
        except IOError, e:
            print "  EXCEPTION: %s" % e
            raise
        else:
            print "  status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", ' '.join(hosts)
    keepalive_handler.close_all()
Project: autoscan    Author: b01u    | project source | file source
def comp(N, url):
    print '  making %i connections to:\n  %s' % (N, url)

    sys.stdout.write('  first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print '  TIME: %.3f s' % t1

    sys.stdout.write('  now using the keepalive handler       ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print '  TIME: %.3f s' % t2
    print '  improvement factor: %.2f' % (t1/t2, )
Project: tools    Author: okabe    | project source | file source
def checker():
    while True:
        if not proxyq.empty():
            proxy = "http://{}".format( proxyq.get() )
            url = "http://icanhazip.com"
            proxy_handler = urllib2.ProxyHandler( { "http" : proxy } )
            opener = urllib2.build_opener( proxy_handler )
            urllib2.install_opener( opener )
            printq.put( "[>] Trying {}".format( proxy ) )
            try:
                response = urllib2.urlopen( url, timeout=3 ).readlines()
                for line in response:
                    if line.rstrip( "\n" ) in proxy:
                        printq.put( "[+] Working proxy: {}".format( proxy ) )
                        with open( "working.txt", "a" ) as log:
                            log.write( "{}\n".format( proxy ) )
                        log.close()
            except Exception as ERROR:
                printq.put( "[!] Bad proxy: {}".format( proxy ) )
            proxyq.task_done()
Project: 00scanner    Author: xiaoqin00    | project source | file source
def init_options(proxy=None, cookie=None, ua=None, referer=None):
    globals()["_headers"] = dict(filter(lambda _: _[1], ((COOKIE, cookie), (UA, ua or NAME), (REFERER, referer))))
    urllib2.install_opener(urllib2.build_opener(urllib2.ProxyHandler({'http': proxy})) if proxy else None)

# if __name__ == "__main__":
#     print "%s #v%s\n by: %s\n" % (NAME, VERSION, AUTHOR)
#     parser = optparse.OptionParser(version=VERSION)
#     parser.add_option("-u", "--url", dest="url", help="Target URL (e.g. \"http://www.target.com/page.php?id=1\")")
#     parser.add_option("--data", dest="data", help="POST data (e.g. \"query=test\")")
#     parser.add_option("--cookie", dest="cookie", help="HTTP Cookie header value")
#     parser.add_option("--user-agent", dest="ua", help="HTTP User-Agent header value")
#     parser.add_option("--referer", dest="referer", help="HTTP Referer header value")
#     parser.add_option("--proxy", dest="proxy", help="HTTP proxy address (e.g. \"http://127.0.0.1:8080\")")
#     options, _ = parser.parse_args()
#     if options.url:
#         init_options(options.proxy, options.cookie, options.ua, options.referer)
#         result = scan_page(options.url if options.url.startswith("http") else "http://%s" % options.url, options.data)
#         print "\nscan results: %s vulnerabilities found" % ("possible" if result else "no")
#     else:
#         parser.print_help()
Project: mechanize    Author: python-mechanize    | project source | file source
def setUp(self):
        mechanize._testcase.TestCase.setUp(self)
        self.test_uri = urljoin(self.uri, "test_fixtures")
        self.server = self.get_cached_fixture("server")
        if self.no_proxies:
            old_opener_m = mechanize._opener._opener
            old_opener_u = urllib2._opener
            mechanize.install_opener(mechanize.build_opener(
                mechanize.ProxyHandler(proxies={})))
            urllib2.install_opener(urllib2.build_opener(
                urllib2.ProxyHandler(proxies={})))

            def revert_install():
                mechanize.install_opener(old_opener_m)
                urllib2.install_opener(old_opener_u)
            self.add_teardown(revert_install)
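
The save-and-restore pattern in this test is useful outside tests as well; a sketch of scoping a temporary global opener with try/finally:

import urllib2

old_opener = urllib2._opener                      # may be None if nothing was installed yet
urllib2.install_opener(urllib2.build_opener(urllib2.ProxyHandler({})))
try:
    data = urllib2.urlopen('http://example.com').read()
finally:
    urllib2.install_opener(old_opener)            # put the previous opener back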