The following 50 code examples, extracted from open-source Python projects, illustrate how to use ssl._create_unverified_context().
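Before the project examples, here is a minimal sketch (not taken from any of the projects below) of the two patterns that recur throughout them: passing an unverified context to a single urlopen() call, and installing it process-wide as the default HTTPS context. The target URL is a placeholder, and disabling certificate verification should only be done against hosts you trust, such as lab machines with self-signed certificates.

import ssl
import urllib.request

# Pattern 1: disable verification for one request only by passing a context.
# "https://self-signed.example/" is a placeholder host, not a real endpoint.
context = ssl._create_unverified_context()
with urllib.request.urlopen("https://self-signed.example/", context=context) as resp:
    body = resp.read()

# Pattern 2: disable verification globally; every later HTTPS request made
# through the standard library will skip certificate checks.
ssl._create_default_https_context = ssl._create_unverified_context

Most of the examples below use one of these two patterns, with the only differences being the HTTP client (urllib, urllib2, httplib/http.client, pyVmomi, etc.) and whether the bypass is conditional on a configuration flag.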
def __init__(self, username=None, password=None, version=None, debug=None, requesttype=None, baseurl=None, site=None):
    if username:
        self.username = username
    if password:
        self.password = password
    if version:
        self.version = version
    if debug:
        self.debug = debug
    if requesttype:
        self.requesttype = requesttype
    if baseurl:
        self.baseurl = baseurl
    if site:
        self.site = site
    ssl._create_default_https_context = ssl._create_unverified_context  # This is the way to allow unverified SSL
    self.cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPHandler(debuglevel=1 if self.debug else 0),
        urllib.request.HTTPSHandler(debuglevel=1 if self.debug else 0),
        urllib.request.HTTPCookieProcessor(self.cj))
    opener.addheaders = [('User-agent', 'Mozilla/5.0')]
    urllib.request.install_opener(opener)
def __init__(self, username=None, password=None, debug=None, requesttype=None, baseurl=None):
    if username:
        self.username = username
    if password:
        self.password = password
    if debug:
        self.debug = debug
    if requesttype:
        self.requesttype = requesttype
    if baseurl:
        self.baseurl = baseurl
    ssl._create_default_https_context = ssl._create_unverified_context  # This is the way to allow unverified SSL
    self.cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPHandler(debuglevel=1 if self.debug else 0),
        urllib.request.HTTPSHandler(debuglevel=1 if self.debug else 0),
        urllib.request.HTTPCookieProcessor(self.cj))
    opener.addheaders = [('User-agent', 'Mozilla/5.0')]
    urllib.request.install_opener(opener)
def process_task(self, server_connectivity_info, plugin_command, option_dict=None):
    if option_dict and 'path' in option_dict.keys():
        path = str(option_dict['path'])
    else:
        path = '/'
    if server_connectivity_info.port == 80:
        conn = httplib.HTTPConnection(server_connectivity_info.ip_address, server_connectivity_info.port)
    elif server_connectivity_info.port == 443:
        conn = httplib.HTTPSConnection(server_connectivity_info.ip_address, server_connectivity_info.port,
                                       context=ssl._create_unverified_context())
    else:
        raise ValueError("ShellshockTesterPlugin: Can't make test for this port {0}".format(server_connectivity_info.port))
    try:
        conn.connect()
    except Exception as e:
        raise ValueError("ShellshockTesterPlugin: Connection error for port {0}. {1}".format(server_connectivity_info.port, str(e)))
    else:
        conn.request("GET", path, "", {'User-Agent': '() { :; }; echo; echo Vulnerable to CVE-2014-6271'})
        response = conn.getresponse()
        return ShellshockTesterResult(server_connectivity_info, plugin_command, option_dict, self.is_vulnerable(response))
def post_request(self, request, payload=None):
    # FIXME: provide full set of ssl options instead of this hack
    if self.server_url.startswith('https'):
        import ssl
        return urllib2.urlopen(request, data=payload, timeout=self.timeout,
                               context=ssl._create_unverified_context())
    return urllib2.urlopen(request, data=payload, timeout=self.timeout)

#    def post_request(self, request, payload=None):  # @UnusedVariable
#        try:
#            try:
#                _response = urllib2.urlopen(request, timeout=self.timeout)
#            except TypeError:
#                _response = urllib2.urlopen(request)
#        except urllib2.HTTPError, e:
#            logerr("post failed: %s" % e)
#            raise weewx.restx.FailedPost(e)
#        else:
#            return _response
def getJson(self, url):
    global context
    self.context = ssl._create_unverified_context()
    try:
        self.request = urllib2.Request(self.url, headers={'User-agent': 'BID/1.0'})
        self.response = urllib2.urlopen(self.request, context=self.context)
        self.data = json.loads(self.response.read())
    except Exception:
        self.textEdit.append(u"[Downloader] ERRO! Erro ao recuperar .json para %s!" % (self.url))
        return {}
    return self.data

# Downloads the images from the chosen imageboard, board and thread.
# For large files, this method may freeze the window until the process finishes!
# ===================================================================================================
def getJson(self, url):
    global context
    self.context = ssl._create_unverified_context()
    try:
        self.request = urllib2.Request(self.url, headers={'User-agent': 'BID/1.0'})
        self.response = urllib2.urlopen(self.request, context=self.context)
        self.data = json.loads(self.response.read())
    except Exception:
        self.textEdit.append(u"[Downloader] ERRO! Erro ao recuperar .json em %s!" % (self.url))
        return {}
    return self.data

# Downloads the images from the chosen imageboard, board and thread.
# For large files, this method may freeze the window until the process finishes!
# ===================================================================================================
def getJson(self, url):
    global context
    self.context = ssl._create_unverified_context()
    try:
        self.request = urllib2.Request(self.url, headers={'User-agent': 'BID/1.0'})
        self.response = urllib2.urlopen(self.request, context=self.context)
        self.data = json.loads(self.response.read())
    except Exception:
        self.textEdit.append(u"[Downloader] ERRO! Erro ao recuperar .json em %s!" % (self.url))
        return {}
    return self.data

# Computes the progress of the file downloads.
# It reads the current value of the progress bar and increments it with each download step.
# =============================================================================
def request(cls, url, verify_cert=True):
    """
    Web request
    :param: url: The url link
    :return JSON object
    """
    req = urlrequest.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    # res = urlrequest.urlopen(url)
    if verify_cert:
        res = urlrequest.urlopen(
            req, timeout=RESTfulApiSocket.DEFAULT_URLOPEN_TIMEOUT)
    else:
        res = urlrequest.urlopen(
            req, context=ssl._create_unverified_context(),
            timeout=RESTfulApiSocket.DEFAULT_URLOPEN_TIMEOUT)
    try:
        res = json.loads(res.read().decode('utf8'))
        return res
    except:
        return {}
def upload_files_in_session(self, files_map, session_id):
    for f_name, f_path in files_map.items():
        file_spec = self.client.upload_file_service.AddSpec(name=f_name,
                                                            source_type=UpdateSessionFile.SourceType.PUSH,
                                                            size=os.path.getsize(f_path))
        file_info = self.client.upload_file_service.add(session_id, file_spec)
        # Upload the file content to the file upload URL
        with open(f_path, 'rb') as local_file:
            request = urllib2.Request(file_info.upload_endpoint.uri, local_file)
            request.add_header('Cache-Control', 'no-cache')
            request.add_header('Content-Length', '{0}'.format(os.path.getsize(f_path)))
            request.add_header('Content-Type', 'text/ovf')
            if self.skip_verification and hasattr(ssl, '_create_unverified_context'):
                # Python 2.7.9 has stronger SSL certificate validation,
                # so we need to pass in a context when dealing with
                # self-signed certificates.
                context = ssl._create_unverified_context()
                urllib2.urlopen(request, context=context)
            else:
                # Don't pass context parameter since versions of Python
                # before 2.7.9 don't support it.
                urllib2.urlopen(request)
def download_to(url, download_dir, insecure=False):
    name = url.split('/')[-1]
    file = os.path.join(download_dir, name)
    click.echo("Downloading {0}".format(url))
    bar_len = 1000
    with click.progressbar(length=bar_len, width=70) as bar:
        def hook(count, block_size, total_size):
            percent = int(count * block_size * bar_len / total_size)
            if percent > 0 and percent < bar_len:
                # Hack because we can't set the position
                bar.pos = percent
                bar.update(0)
        context = None
        if insecure:
            context = ssl._create_unverified_context()
        CGetURLOpener(context=context).retrieve(url, filename=file, reporthook=hook, data=None)
        bar.update(bar_len)
    if not os.path.exists(file):
        raise BuildError("Download failed for: {0}".format(url))
    return file
def __connect(self):
    err = self.__socket.connect_ex((self.__host, self.__port))
    if not err:
        self.__connect_ok = True
    if self.__ssl_on:
        self.__tls_ctx = ssl._create_unverified_context()
        ssl_verinfo = ssl.OPENSSL_VERSION_INFO
        # ALPN is only supported with OpenSSL 1.0.2 and newer
        if ssl_verinfo[0] >= 1 and ssl_verinfo[1] >= 0 and ssl_verinfo[2] >= 2:
            self.__alpn_on = True
        if self.__alpn_on:
            alpn_protocols = ["http/1.1"]
            self.__tls_ctx.set_alpn_protocols(alpn_protocols)
        self.__socket = self.__tls_ctx.wrap_socket(self.__socket)
    self.__socket.setblocking(0)
    return
def getLatestImageTimestamp(self):
    currentTimeoutDefault = socket.getdefaulttimeout()
    socket.setdefaulttimeout(3)
    try:
        # TODO: Use Twisted's URL fetcher, urlopen is evil. And it can
        # run in parallel to the package update.
        from time import strftime
        from datetime import datetime
        imageVersion = about.getImageTypeString().split(" ")[1]
        imageVersion = (int(imageVersion) < 5 and "%.1f" or "%s") % int(imageVersion)
        url = "http://openpli.org/download/timestamp/%s~%s" % (HardwareInfo().get_device_model(), imageVersion)
        try:
            latestImageTimestamp = datetime.fromtimestamp(int(urlopen(url, timeout=5).read())).strftime(_("%Y-%m-%d %H:%M"))
        except:
            # OpenPli 5.0 uses python 2.7.11 and here we need to bypass the certificate check
            from ssl import _create_unverified_context
            latestImageTimestamp = datetime.fromtimestamp(int(urlopen(url, timeout=5, context=_create_unverified_context()).read())).strftime(_("%Y-%m-%d %H:%M"))
    except:
        latestImageTimestamp = ""
    socket.setdefaulttimeout(currentTimeoutDefault)
    return latestImageTimestamp
def download_image(url: str = '', save_path: str = '', unverified_ctx: bool = False) -> Union[None, str]:
    """Download image and save in current directory on local machine.

    :param str url: URL to image.
    :param str save_path: Saving path.
    :param bool unverified_ctx: Create unverified context.

    :return: Image name.
    :rtype: str or None
    """
    if unverified_ctx:
        ssl._create_default_https_context = ssl._create_unverified_context

    if url is not None:
        image_name = url.rsplit('/')[-1]
        request.urlretrieve(url, save_path + image_name)
        return image_name
    return None
def download_providers(self, url):
    """Download providers file from url"""
    path = tempfile.gettempdir()
    filename = os.path.join(path, 'providers.txt')
    print("\n----Downloading providers file----")
    if DEBUG:
        print("providers url = {}".format(url))
    try:
        # context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        context = ssl._create_unverified_context()
        urllib.urlretrieve(url, filename, context=context)
        return filename
    except Exception:
        pass
    # fallback to no ssl context
    try:
        urllib.urlretrieve(url, filename)
        return filename
    except Exception, e:
        raise e
def get_url_content(url):
    '''docstring'''
    headers = {"User-Agent": 'User-Agent:Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N)'
                             'AppleWebKit/537.36 (KHTML, like Gecko) '
                             'Chrome/61.0.3163.100 Mobile Safari/537.36'}
    try:
        context = ssl._create_unverified_context()
        req = urllib.request.Request(url, headers=headers)
        resp = urllib.request.urlopen(req, context=context).read()
        content = resp.decode('utf-8')
        print(content)
    except urllib.error.URLError as error:
        if hasattr(error, 'reason'):
            print(error.reason)
def SetCMSRecording(callID, state):
    ssl._create_default_https_context = ssl._create_unverified_context
    conn = http.client.HTTPSConnection(Config.cmsFqdn)
    if (state == True):
        payload = "recording=true"
    else:
        payload = "recording=false"
    headers = {
        'authorization': Config.cmsGenericAuthRealm,
        'content-type': "application/x-www-form-urlencoded",
        'cache-control': "no-cache",
        'postman-token': "b5f016ed-5e19-d311-563e-c6aa7fdaa591"
    }
    conn.request("PUT", "/api/v1/calls/" + callID, payload, headers)
    res = conn.getresponse()
    data = res.read()
    print(data.decode("utf-8"))
    print("Recording Bit Set")
def _vmware_connect(self, target, section):
    """
    Connect to Vcenter and get connection
    """
    context = None
    if self.config[section]['ignore_ssl'] and \
            hasattr(ssl, "_create_unverified_context"):
        context = ssl._create_unverified_context()
    try:
        si = connect.Connect(target, 443,
                             self.config[section]['vmware_user'],
                             self.config[section]['vmware_password'],
                             sslContext=context)
        return si
    except vmodl.MethodFault as error:
        print("Caught vmodl fault: " + error.msg)
        return None
def createCycle(self, cycleName):
    if cycleName in self.getExistedCycleNames():
        return self.getCycleId(cycleName)
    headers = {"Authorization": " Basic " + b64encode(self.username + ":" + self.password),
               "Content-Type": "application/json"}
    startDate = datetime.datetime.now().strftime('%d/%b/%y')
    values = json.dumps({
        "clonedCycleId": "",
        "name": cycleName,
        "build": "",
        "environment": "",
        "description": "AACI Zephyr plugin",
        "startDate": startDate,
        "endDate": "",
        "projectId": self.projectId,
        "versionId": self.versionId})
    url = self.zapiBaseUrl + '/zapi/latest/cycle'
    request = Request(url, data=values, headers=headers)
    response = json.load(urlopen(request, context=ssl._create_unverified_context()))
    # print json.dumps(response)
    return response['id']
def addTestToCycle(self, cycleId, caseList):
    if not isinstance(caseList, list):
        cases = [caseList]
    else:
        cases = caseList
    values = json.dumps({
        "issues": cases,
        "versionId": self.versionId,
        "cycleId": cycleId,
        "projectId": self.projectId,
        "method": "1"
    })
    headers = {"Authorization": " Basic " + b64encode(self.username + ":" + self.password),
               "Content-Type": "application/json"}
    url = self.zapiBaseUrl + '/zapi/latest/execution/addTestsToCycle/'
    request = Request(url, data=values, headers=headers)
    urlopen(request, context=ssl._create_unverified_context()).read()
def _updateWindowsRootCertificates():
    crypt = ctypes.windll.crypt32
    # Get the server certificate.
    sslCont = ssl._create_unverified_context()
    u = urlopen("https://addons.nvda-project.org", context=sslCont)
    cert = u.fp._sock.getpeercert(True)
    u.close()
    # Convert to a form usable by Windows.
    certCont = crypt.CertCreateCertificateContext(
        0x00000001,  # X509_ASN_ENCODING
        cert,
        len(cert))
    # Ask Windows to build a certificate chain, thus triggering a root certificate update.
    chainCont = ctypes.c_void_p()
    crypt.CertGetCertificateChain(
        None, certCont, None, None,
        ctypes.byref(updateCheck.CERT_CHAIN_PARA(cbSize=ctypes.sizeof(updateCheck.CERT_CHAIN_PARA),
                                                 RequestedUsage=updateCheck.CERT_USAGE_MATCH())),
        0, None, ctypes.byref(chainCont))
    crypt.CertFreeCertificateChain(chainCont)
    crypt.CertFreeCertificateContext(certCont)
def index(request):
    if request.method == "GET":
        try:
            ssl._create_default_https_context = ssl._create_unverified_context
            opener = wdf_urllib.build_opener(
                wdf_urllib.HTTPCookieProcessor(CookieJar()))
            wdf_urllib.install_opener(opener)
        except:
            pass
        uuid = getUUID()
        url = 'https://login.weixin.qq.com/qrcode/' + uuid
        params = {
            't': 'webwx',
            '_': int(time.time()),
        }
        request = getRequest(url=url, data=urlencode(params))
        response = wdf_urllib.urlopen(request)
        context = {
            'uuid': uuid,
            'response': response.read(),
            'delyou': '',
        }
        return render_to_response('index.html', context)
def get_json_response(server, api, username, password):
    """ Returns the response from the URL specified """
    try:
        # lib opener
        response = {}
        context = ssl._create_unverified_context()
        conn = HTTPSConnection(server, context=context)
        auth = str.encode("%s:%s" % (username, password))
        user_and_pass = b64encode(auth).decode("ascii")
        headers = {'Authorization': 'Basic %s' % user_and_pass, "Accept": 'application/json'}
        conn.request('GET', api, headers=headers)
        res = conn.getresponse()
        bit_data = res.read()
        string_data = bit_data.decode(encoding='UTF-8')
        response['data'] = string_data
        response['status'] = 200
    except:
        print("--Unexpected error:", sys.exc_info()[1])
        response['data'] = sys.exc_info()[1]
        response['status'] = 400
    return response
def get_vpn_configs(self):
    try:
        with urllib.request.urlopen(self.config_address) as url:
            config_json = url.read().decode('utf-8').split('\n')[0]
    except urllib.error.URLError:
        logger.warning('\nWARNING: There may have been an issue with certificate ' +
                       'verification to the PIA server info page. Trying to ' +
                       'bypass cert check that python performs since PEP 476.\n')
        try:
            context = ssl._create_unverified_context()
            with urllib.request.urlopen(self.config_address, context=context) as url:
                config_json = url.read().decode('utf-8').split('\n')[0]
        except urllib.error.URLError:
            sys.exit('\nPIA VPN configurations were not able to be downloaded.' +
                     'This script needs an internet connection to be able to ' +
                     'fetch them automatically. Exiting.\n')
    configs_dict = json.loads(config_json)
    self.configs_dict = {k: v for k, v in configs_dict.items()
                         if isinstance(v, dict) and v.get('dns')}
def load_mnist(training_num=50000):
    data_path = os.path.join(os.path.dirname(os.path.realpath('__file__')), 'mnist.npz')
    if not os.path.isfile(data_path):
        from six.moves import urllib
        origin = (
            'https://github.com/sxjscience/mxnet/raw/master/example/bayesian-methods/mnist.npz'
        )
        print 'Downloading data from %s to %s' % (origin, data_path)
        context = ssl._create_unverified_context()
        urllib.request.urlretrieve(origin, data_path, context=context)
        print 'Done!'
    dat = numpy.load(data_path)
    X = (dat['X'][:training_num] / 126.0).astype('float32')
    Y = dat['Y'][:training_num]
    X_test = (dat['X_test'] / 126.0).astype('float32')
    Y_test = dat['Y_test']
    Y = Y.reshape((Y.shape[0],))
    Y_test = Y_test.reshape((Y_test.shape[0],))
    return X, Y, X_test, Y_test
def create_api():
    """create a twitter api"""
    from twitter import OAuth, Twitter
    from app import APP_INSTANCE as app
    access_token = app.get_config('api.twitter.access_token')
    access_secret = app.get_config('api.twitter.access_secret')
    consumer_key = app.get_config('api.twitter.consumer_key')
    consumer_secret = app.get_config('api.twitter.consumer_secret')

    # temporary fix for certificate error
    ssl._create_default_https_context = ssl._create_unverified_context

    oauth = OAuth(access_token, access_secret, consumer_key, consumer_secret)
    # Initiate the connection to Twitter API
    return Twitter(auth=oauth)
def cache_download(url, filename, caller='', ssl_enabled=True):
    '''Download a file to the cache'''
    if caller == '':
        caller_get()
    filename_full = 'cache/{}_{}'.format(caller, filename)
    if os.path.isfile(filename_full):
        return 1
    else:
        try:
            if ssl_enabled:
                urllib.request.urlretrieve(url, filename_full)
            else:
                ssl._create_default_https_context = ssl._create_unverified_context
                urllib.request.urlretrieve(url, filename_full)
            return 1
        except urllib.error.HTTPError:
            return -1
        except urllib.error.URLError:
            return -2
def main():
    global mySession
    ssl._create_default_https_context = ssl._create_unverified_context
    headers = {'User-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2663.0 Safari/537.36'}
    mySession = requests.Session()
    mySession.headers.update(headers)
    if not getInfos():
        print(u'??????')
        return
    getQrcode()       # fetch the login QR code
    waitLogin()       # wait for the QR code to be scanned
    login()           # perform the login
    print('??????')
    threading.Thread(target=keepLogin)  # keep the login session alive
    getTasksign()     # fetch the task sign token
    getUserinfo()     # fetch user information
    # addLinktask("magnet:?xt=urn:btih:690ba0361597ffb2007ad717bd805447f2acc624")
    # addLinktasks([link])  # takes a list of links
    # print tsign
    # print "fuck"
    # get_bt_upload_info()
    # upload_torrent()
    add_many_bt()
def __network_ping(self):
    try:
        repourl = urljoin(self.get_depot_url(), "versions/0")
        # Disable SSL peer verification, we just want to check
        # if the depot is running.
        url = urlopen(repourl, context=ssl._create_unverified_context())
        url.close()
    except HTTPError as e:
        # Server returns NOT_MODIFIED if catalog is up
        # to date
        if e.code == http_client.NOT_MODIFIED:
            return True
        else:
            return False
    except URLError as e:
        return False
    return True
def _network_ping(self):
    try:
        # Ping the versions URL, rather than the default /
        # so that we don't initialize the BUI code yet.
        repourl = urljoin(self.url, "versions/0")
        # Disable SSL peer verification, we just want to check
        # if the depot is running.
        urlopen(repourl, context=ssl._create_unverified_context())
    except HTTPError as e:
        if e.code == http_client.FORBIDDEN:
            return True
        return False
    except URLError:
        return False
    return True
def __init__(self, original_url=None, timeout=2, search=(), ssl_context=None, url_check=True,
             similarity_threshold=0.6, display_number=100):
    # Parse chinese to ascii and delete parameters.
    self.url = original_url
    if self.url:
        self.url_check = url_check
        self._check_url()
        self.host = self.url.split('//')[0] + '//' + self.url.split('//')[1].split('/')[0]
    # Create ssl context.
    if ssl_context:
        self.ssl_context = ssl_context
    else:
        self.ssl_context = _create_unverified_context()
    # Initialization parameters.
    self.temp_file_name = 'mini-spider.temp'
    self.timeout = timeout
    self.similarity_threshold = similarity_threshold
    self.pattern_list = []
    self.search_list = self._initialize_search(search)
    self.display_number = display_number
    self.result = []
    self.http_flag = 0
def test_startUp(self):
    try:
        lock = threading.Lock()
        self.https = https_reader.server_plugin(lock, PORT)
    except Exception as e:
        self.fail("Server Failed to Start")
    time.sleep(1)
    try:
        conn = httplib.HTTPSConnection('localhost', PORT, timeout=5,
                                       context=ssl._create_unverified_context())
        connection = True
    except Exception as e:
        print e
        connection = False
    finally:
        self.assertTrue(connection)
    conn.close()
    self.https.server.shutdown()
    self.https.server.server_close()
    time.sleep(1)
def get_title_from_webpage(url):
    """
    Fetch <title> of a html site for title element
    :url: str (http url)
    :returns: str
    """
    # LOL SECURITY
    ssl._create_default_https_context = ssl._create_unverified_context
    try:
        h = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9'}
        u = urllib2.Request(url, headers=h)
        u = urllib2.urlopen(u)
        soup = BeautifulSoup(u, "html.parser")
        s = soup.title.string.replace('\n', ' ').replace('\r', '').lstrip().rstrip()
        s = s.lstrip()
        return s
    except (AttributeError, MemoryError, ssl.CertificateError, IOError) as e:
        return "No title"
    except ValueError:
        return False
def getLatestImageTimestamp(self):
    # TODO: Impement own sh4 timestamp
    return ""
    currentTimeoutDefault = socket.getdefaulttimeout()
    socket.setdefaulttimeout(3)
    try:
        # TODO: Use Twisted's URL fetcher, urlopen is evil. And it can
        # run in parallel to the package update.
        from time import strftime
        from datetime import datetime
        imageVersion = about.getImageTypeString().split(" ")[1]
        imageVersion = (int(imageVersion) < 5 and "%.1f" or "%s") % int(imageVersion)
        url = "http://openpli.org/download/timestamp/%s~%s" % (HardwareInfo().get_device_model(), imageVersion)
        try:
            latestImageTimestamp = datetime.fromtimestamp(int(urlopen(url, timeout=5).read())).strftime(_("%Y-%m-%d %H:%M"))
        except:
            # OpenPli 5.0 uses python 2.7.11 and here we need to bypass the certificate check
            from ssl import _create_unverified_context
            latestImageTimestamp = datetime.fromtimestamp(int(urlopen(url, timeout=5, context=_create_unverified_context()).read())).strftime(_("%Y-%m-%d %H:%M"))
    except:
        latestImageTimestamp = ""
    socket.setdefaulttimeout(currentTimeoutDefault)
    return latestImageTimestamp
def __run(self, req):
    try:
        ssl._create_default_https_context = ssl._create_unverified_context
        if self.__opener is not None:
            response = self.__opener.open(req)
            self.__code = response.code
            return response.read()
        else:
            context = ssl._create_unverified_context()
            response = urllib2.urlopen(req, context=context)
            self.__code = response.code
            return response.read()
    except Exception, e:
        raise Exception(e.message)
def __run(self, req):
    try:
        ssl._create_default_https_context = ssl._create_unverified_context
        if self.__opener is not None:
            response = self.__opener.open(req)
            self.__code = response.code
            return response.read()
        else:
            context = ssl._create_unverified_context()
            response = urllib2.urlopen(req, context=context)
            self.__code = response.code
            return response.read()
    except Exception, e:
        print(e)
        return None
def do_get(url):
    parsed = urlparse.urlparse(url)
    path = parsed.path
    if parsed.query:
        path = '%s?%s' % (path, parsed.query)
    if parsed.scheme == 'http':
        conn = httplib.HTTPConnection(TARGET_IP)
    elif parsed.scheme == 'https':
        conn = httplib.HTTPSConnection(TARGET_IP, timeout=8, context=ssl._create_unverified_context())
    conn.request('GET', path, headers={'Host': parsed.netloc})
    resp = conn.getresponse()
    body = resp.read().decode('utf8')
    resp.close()
    conn.close()
    return resp, body
def readGithubCommitLogs(self):
    url = 'https://api.github.com/repos/OpenLD/%s/commits' % self.projects[self.project][0]
    commitlog = ""
    from datetime import datetime
    from json import loads
    from urllib2 import urlopen
    try:
        commitlog += 80 * '-' + '\n'
        commitlog += url.split('/')[-2] + '\n'
        commitlog += 80 * '-' + '\n'
        try:
            # OpenLD 3.0 uses python 2.7.12 and here we need to bypass the certificate check
            from ssl import _create_unverified_context
            log = loads(urlopen(url, timeout=5, context=_create_unverified_context()).read())
        except:
            log = loads(urlopen(url, timeout=5).read())
        for c in log:
            creator = c['commit']['author']['name']
            title = c['commit']['message']
            date = datetime.strptime(c['commit']['committer']['date'], '%Y-%m-%dT%H:%M:%SZ').strftime('%x %X')
            commitlog += date + ' ' + creator + '\n' + title + 2 * '\n'
        commitlog = commitlog.encode('utf-8')
        self.cachedProjects[self.projects[self.project][1]] = commitlog
    except:
        commitlog += _("Currently the commit log cannot be retrieved - please try later again")
    self["AboutScrollLabel"].setText(commitlog)
def __init__(self, options, handle=None):
    self.handle = handle
    self.opts = deepcopy(options)

    # This is ugly but we don't want any fetches to fail - we expect
    # to encounter unverified SSL certs!
    if sys.version_info >= (2, 7, 9):
        ssl._create_default_https_context = ssl._create_unverified_context

# Bit of a hack to support SOCKS because of the loading order of
# modules. sfscan will call this to update the socket reference
# to the SOCKS one.
def main():
    # ignore invalid cert on ESX box
    import ssl
    _create_unverified_https_context = ssl._create_unverified_context
    ssl._create_default_https_context = _create_unverified_https_context

    vm_list = get_vm_list()

    app = Flask(__name__)

    @app.route("/")
    def index():
        result = '''<!DOCTYPE html>
<html>
<head>
    <title>VulnLab</title>
</head>
<body>
    <h1>Reset</h1>
'''
        for name, uuid in sorted(vm_list.items()):
            result += '    <a href="/reset/' + name + '">' + name + '</a><br>\n'
        result += '''
</body>
</html>
'''
        return result, 200

    @app.route("/reset/<string:vm_name>")
    def reset(vm_name):
        reset_vm(vm_list, vm_name)
        return 'OK\n', 200

    # start the web server
    app.run(host="0.0.0.0", port=5000)
def __init__(self, chosenDir):
    self.chosenDir = chosenDir
    self.replaceFiles = False
    self.messageBox = ShowMessageBox()
    self.changeDir()
    self.context = ssl._create_unverified_context()

# Switches the local folder used to store the thread's files
# ==========================================================
def getVmwServiceContent(vcip, vcuser, vcpwd):
    unverified_context = ssl._create_unverified_context
    ssl._create_default_https_context = unverified_context
    si = Connect(host=vcip, port=443, user=vcuser, pwd=vcpwd)
    return si.RetrieveContent()
def serversess(self, f_engine_address, f_engine_username, f_engine_password, f_engine_namespace='DOMAIN'):
    """
    Method to setup the session with the Virtualization Engine

    f_engine_address: The Virtualization Engine's address (IP/DNS Name)
    f_engine_username: Username to authenticate
    f_engine_password: User's password
    f_engine_namespace: Namespace to use for this session. Default: DOMAIN
    """
    # if use_https:
    #     if hasattr(ssl, '_create_unverified_context'):
    #         ssl._create_default_https_context = \
    #             ssl._create_unverified_context
    try:
        if f_engine_password:
            self.server_session = DelphixEngine(f_engine_address,
                                                f_engine_username,
                                                f_engine_password,
                                                f_engine_namespace)
        elif f_engine_password is None:
            self.server_session = DelphixEngine(f_engine_address,
                                                f_engine_username,
                                                None, f_engine_namespace)
    except (HttpError, RequestError, JobError) as e:
        raise DlpxException('ERROR: An error occurred while authenticating'
                            ' to {}:\n {}\n'.format(f_engine_address, e))
def response_code(self, url):
    try:
        if self.allow_insecure:
            return urllib2.urlopen(url, timeout=self.timeout, context=ssl._create_unverified_context()).getcode()  # NOQA
        else:
            return urllib2.urlopen(url, timeout=self.timeout).getcode()
    except urllib2.HTTPError as e:
        return e.getcode()
    except urllib2.URLError as e:
        return e
def get_headers(self, url):
    try:
        if self.allow_insecure:
            return dict(urllib2.urlopen(url, timeout=self.timeout, context=ssl._create_unverified_context()).info())  # NOQA
        else:
            return dict(urllib2.urlopen(url, timeout=self.timeout).info())
    except Exception as e:
        return e
def read_data(self, url):
    try:
        if self.allow_insecure:
            return urllib2.urlopen(url, timeout=self.timeout, context=ssl._create_unverified_context()).read()  # NOQA
        else:
            return urllib2.urlopen(url, timeout=self.timeout).read()
    except Exception as e:
        return e

# AppleLoops