The following 50 code examples, extracted from open source Python projects, illustrate how to use warnings.warn().
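Before the extracted examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the names ConfigWarning and load_timeout are made up for illustration) showing the typical warnings.warn() call pattern: a message, an optional warning category, and stacklevel to attribute the warning to the caller.

import warnings

class ConfigWarning(UserWarning):
    # Hypothetical category used only for this illustration.
    pass

def load_timeout(value):
    """Parse a timeout setting, warning on suspicious input."""
    if value < 0:
        # stacklevel=2 attributes the warning to the caller of load_timeout(),
        # the same convention many of the examples below follow.
        warnings.warn("negative timeout %r, using 0 instead" % value,
                      ConfigWarning, stacklevel=2)
        return 0
    return value

if __name__ == "__main__":
    print(load_timeout(-5))  # emits a ConfigWarning, then prints 0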
def _warn_unsafe_extraction_path(path):
    """
    If the default extraction path is overridden and set to an insecure
    location, such as /tmp, it opens up an opportunity for an attacker to
    replace an extracted file with an unauthorized payload. Warn the user
    if a known insecure location is used.

    See Distribute #375 for more details.
    """
    if os.name == 'nt' and not path.startswith(os.environ['windir']):
        # On Windows, permissions are generally restrictive by default
        # and temp directories are not writable by other users, so
        # bypass the warning.
        return
    mode = os.stat(path).st_mode
    if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
        msg = ("%s is writable by group/others and vulnerable to attack "
               "when used with get_resource_filename. Consider a more secure "
               "location (set with .set_extraction_path or the "
               "PYTHON_EGG_CACHE environment variable)." % path)
        warnings.warn(msg, UserWarning)
def ready(self):
    if django.VERSION[:2] == (1, 9):
        import warnings
        msg = "You are using an unsupported Django version. DJPT support" \
              " might be dropped in any following release. See " \
              "https://www.djangoproject.com/download/#supported-versions"
        warnings.warn(msg)
    from django_performance_testing.registry import \
        SettingsOrDefaultBasedRegistry
    from django_performance_testing import core
    core.limits_registry = SettingsOrDefaultBasedRegistry()
    from .test_client import integrate_into_test_client
    integrate_into_test_client()
    from .test_runner import integrate_into_django_test_runner
    integrate_into_django_test_runner()
    from .queries import setup_sending_before_clearing_queries_log_signal
    setup_sending_before_clearing_queries_log_signal()
    from .templates import integrate_into_django_templates
    integrate_into_django_templates()
def __init__(self, output_dim, num_senses, num_hyps, use_attention=False,
             return_attention=False, **kwargs):
    # Set output_dim in kwargs so that we can pass it along to LSTM's init
    kwargs['output_dim'] = output_dim
    self.num_senses = num_senses
    self.num_hyps = num_hyps
    self.use_attention = use_attention
    self.return_attention = return_attention
    super(OntoAttentionLSTM, self).__init__(**kwargs)
    # Recurrent would have set the input shape to cause the input dim to be 3. Change it.
    self.input_spec = [InputSpec(ndim=5)]
    if self.consume_less == "cpu":
        # In the LSTM implementation in Keras, consume_less = cpu causes all gates' inputs
        # to be precomputed and stored in memory. However, this doesn't work with OntoLSTM
        # since the input to the gates is dependent on the previous timestep's output.
        warnings.warn("OntoLSTM does not support consume_less = cpu. Changing it to mem.")
        self.consume_less = "mem"
    # TODO: Remove this dependency.
    if K.backend() == "tensorflow" and not self.unroll:
        warnings.warn("OntoLSTM does not work with unroll=False when backend is TF. Changing it to True.")
        self.unroll = True
def _check_platform(self, other):
    """
    Verify that this and another shellcode have compatible platforms.
    This means they are for the same platform, or at least one of them
    is platform independent.

    :type  other: `Shellcode`
    :param other: Another shellcode.

    :return: There is no return value.
        Warnings are raised if the platforms don't match.
    """
    if 'any' not in (self.arch, other.arch) and self.arch != other.arch:
        msg = "Processor architectures don't match: %s and %s"
        msg = msg % (self.arch, other.arch)
        warnings.warn(msg, ShellcodeWarning)
    if 'any' not in (self.os, other.os) and self.os != other.os:
        msg = "Operating systems don't match: %s and %s"
        msg = msg % (self.os, other.os)
        warnings.warn(msg, ShellcodeWarning)
def __init__(self, *children):
    # Populate the list of children.
    self._children = []
    parent = weakref.ref(self)
    previous = self
    for child in children:
        if isinstance(child, str):  # bytes
            child = Raw(child, self.arch, self.os)
        elif not isinstance(child, Shellcode):
            raise TypeError(
                "Expected Shellcode, got %s instead" % type(child))
        elif child.parent:
            msg = "Already had a parent: %r" % child.parent
            warnings.warn(msg, ShellcodeWarning)
        child._parent = parent
        self._children.append(child)
        previous._check_platform(child)
        previous = child

    # Dark magic to implement the metadata combination feature.
def __add__(self, other):
    """
    Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
    converts them to L{Literal}s by default.

    Example::
        greet = Word(alphas) + "," + Word(alphas) + "!"
        hello = "Hello, World!"
        print (hello, "->", greet.parseString(hello))
    Prints::
        Hello, World! -> ['Hello', ',', 'World', '!']
    """
    if isinstance(other, basestring):
        other = ParserElement._literalStringClass(other)
    if not isinstance(other, ParserElement):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                      SyntaxWarning, stacklevel=2)
        return None
    return And([self, other])
def __init__(self, matchString):
    super(Literal, self).__init__()
    self.match = matchString
    self.matchLen = len(matchString)
    try:
        self.firstMatchChar = matchString[0]
    except IndexError:
        warnings.warn("null string passed to Literal; use Empty() instead",
                      SyntaxWarning, stacklevel=2)
        self.__class__ = Empty
    self.name = '"%s"' % _ustr(self.match)
    self.errmsg = "Expected " + self.name
    self.mayReturnEmpty = False
    self.mayIndexError = False

# Performance tuning: this routine gets called a *lot*
# if this is a single character match string and the first character matches,
# short-circuit as quickly as possible, and avoid calling startswith
#~ @profile
def __init__(self, matchString, identChars=None, caseless=False):
    super(Keyword, self).__init__()
    if identChars is None:
        identChars = Keyword.DEFAULT_KEYWORD_CHARS
    self.match = matchString
    self.matchLen = len(matchString)
    try:
        self.firstMatchChar = matchString[0]
    except IndexError:
        warnings.warn("null string passed to Keyword; use Empty() instead",
                      SyntaxWarning, stacklevel=2)
    self.name = '"%s"' % self.match
    self.errmsg = "Expected " + self.name
    self.mayReturnEmpty = False
    self.mayIndexError = False
    self.caseless = caseless
    if caseless:
        self.caselessmatch = matchString.upper()
        identChars = identChars.upper()
    self.identChars = set(identChars)
def toXmlName(self, name):
    nameFirst = name[0]
    nameRest = name[1:]
    m = nonXmlNameFirstBMPRegexp.match(nameFirst)
    if m:
        warnings.warn("Coercing non-XML name", DataLossWarning)
        nameFirstOutput = self.getReplacementCharacter(nameFirst)
    else:
        nameFirstOutput = nameFirst

    nameRestOutput = nameRest
    replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
    for char in replaceChars:
        warnings.warn("Coercing non-XML name", DataLossWarning)
        replacement = self.getReplacementCharacter(char)
        nameRestOutput = nameRestOutput.replace(char, replacement)
    return nameFirstOutput + nameRestOutput
def _warn_legacy_version(self):
    LV = packaging.version.LegacyVersion
    is_legacy = isinstance(self._parsed_version, LV)
    if not is_legacy:
        return

    # While an empty version is technically a legacy version and
    # is not a valid PEP 440 version, it's also unlikely to
    # actually come from someone and instead it is more likely that
    # it comes from setuptools attempting to parse a filename and
    # including it in the list. So for that we'll gate this warning
    # on if the version is anything at all or not.
    if not self.version:
        return

    tmpl = textwrap.dedent("""
        '{project_name} ({version})' is being parsed as a legacy,
        non PEP 440, version. You may find odd behavior and sort
        order. In particular it will be sorted as less than 0.0. It
        is recommended to migrate to PEP 440 compatible versions.
        """).strip().replace('\n', ' ')

    warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
def _dep_map(self):
    try:
        return self.__dep_map
    except AttributeError:
        dm = self.__dep_map = {None: []}
        for name in 'requires.txt', 'depends.txt':
            for extra, reqs in split_sections(self._get_metadata(name)):
                if extra:
                    if ':' in extra:
                        extra, marker = extra.split(':', 1)
                        if invalid_marker(marker):
                            # XXX warn
                            reqs = []
                        elif not evaluate_marker(marker):
                            reqs = []
                    extra = safe_extra(extra) or None
                dm.setdefault(extra, []).extend(parse_requirements(reqs))
        return dm
def __init__(self, headers=None, retries=None, validate_certificate=True):
    if not urlfetch:
        raise AppEnginePlatformError(
            "URLFetch is not available in this environment.")
    if is_prod_appengine_mvms():
        raise AppEnginePlatformError(
            "Use normal urllib3.PoolManager instead of AppEngineManager "
            "on Managed VMs, as using URLFetch is not necessary in "
            "this environment.")

    warnings.warn(
        "urllib3 is using URLFetch on Google App Engine sandbox instead "
        "of sockets. To use sockets directly instead of URLFetch see "
        "https://urllib3.readthedocs.io/en/latest/contrib.html.",
        AppEnginePlatformWarning)

    RequestMethods.__init__(self, headers)
    self.validate_certificate = validate_certificate
    self.retries = retries or Retry.DEFAULT
def _validate_conn(self, conn):
    """
    Called right before a request is made, after the socket is created.
    """
    super(HTTPSConnectionPool, self)._validate_conn(conn)

    # Force connect early to allow us to validate the connection.
    if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
        conn.connect()

    if not conn.is_verified:
        warnings.warn((
            'Unverified HTTPS request is being made. '
            'Adding certificate verification is strongly advised. See: '
            'https://urllib3.readthedocs.io/en/latest/security.html'),
            InsecureRequestWarning)
def wrap_socket(self, socket, server_hostname=None, server_side=False):
    warnings.warn(
        'A true SSLContext object is not available. This prevents '
        'urllib3 from configuring SSL appropriately and may cause '
        'certain SSL connections to fail. You can upgrade to a newer '
        'version of Python to solve this. For more information, see '
        'https://urllib3.readthedocs.io/en/latest/security.html'
        '#insecureplatformwarning.',
        InsecurePlatformWarning
    )
    kwargs = {
        'keyfile': self.keyfile,
        'certfile': self.certfile,
        'ca_certs': self.ca_certs,
        'cert_reqs': self.verify_mode,
        'ssl_version': self.protocol,
        'server_side': server_side,
    }
    if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
        return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
    else:  # Platform-specific: Python 2.6
        return wrap_socket(socket, **kwargs)
def check_install_build_global(options, check_options=None):
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def getname(n):
        return getattr(check_options, n, None)

    names = ["build_options", "global_options", "install_options"]
    if any(map(getname, names)):
        control = options.format_control
        fmt_ctl_no_binary(control)
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2)


###########
# options #
###########
def check_nsp(dist, attr, value):
    """Verify that namespace packages are valid"""
    ns_packages = value
    assert_string_list(dist, attr, ns_packages)
    for nsp in ns_packages:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for " +
                "namespace package %r" % nsp
            )
        parent, sep, child = nsp.rpartition('.')
        if parent and parent not in ns_packages:
            distutils.log.warn(
                "WARNING: %r is declared as a package namespace, but %r"
                " is not: please correct this in setup.py", nsp, parent
            )
def write_or_delete_file(self, what, filename, data, force=False):
    """Write `data` to `filename` or delete if empty

    If `data` is non-empty, this routine is the same as ``write_file()``.
    If `data` is empty but not ``None``, this is the same as calling
    ``delete_file(filename)``.  If `data` is ``None``, then this is a no-op
    unless `filename` exists, in which case a warning is issued about the
    orphaned file (if `force` is false), or deleted (if `force` is true).
    """
    if data:
        self.write_file(what, filename, data)
    elif os.path.exists(filename):
        if data is None and not force:
            log.warn(
                "%s not set in setup(), but %s exists", what, filename
            )
            return
        else:
            self.delete_file(filename)
def _safe_path(self, path):
    enc_warn = "'%s' not %s encodable -- skipping"

    # To avoid accidental trans-coding errors, first convert to unicode
    u_path = unicode_utils.filesys_decode(path)
    if u_path is None:
        log.warn("'%s' in unexpected encoding -- skipping" % path)
        return False

    # Must ensure utf-8 encodability
    utf8_path = unicode_utils.try_encode(u_path, "utf-8")
    if utf8_path is None:
        log.warn(enc_warn, path, 'utf-8')
        return False

    try:
        # accept if either way checks out
        if os.path.exists(u_path) or os.path.exists(utf8_path):
            return True
    # this will catch any encode errors decoding u_path
    except UnicodeEncodeError:
        log.warn(enc_warn, path, sys.getfilesystemencoding())
def _called_from_setup(run_frame):
    """
    Attempt to detect whether run() was called from setup() or by another
    command.  If called by setup(), the parent caller will be the
    'run_command' method in 'distutils.dist', and *its* caller will be
    the 'run_commands' method.  If called any other way, the
    immediate caller *might* be 'run_command', but it won't have been
    called by 'run_commands'. Return True in that case or if a call stack
    is unavailable. Return False otherwise.
    """
    if run_frame is None:
        msg = "Call stack not available. bdist_* commands may fail."
        warnings.warn(msg)
        if platform.python_implementation() == 'IronPython':
            msg = "For best results, pass -X:Frames to enable call stack."
            warnings.warn(msg)
        return True
    res = inspect.getouterframes(run_frame)[2]
    caller, = res[:1]
    info = inspect.getframeinfo(caller)
    caller_module = caller.f_globals.get('__name__', '')
    return (
        caller_module == 'distutils.dist'
        and info.function == 'run_commands'
    )
def maybe_move(self, spec, dist_filename, setup_base):
    dst = os.path.join(self.build_directory, spec.key)
    if os.path.exists(dst):
        msg = (
            "%r already exists in %s; build directory %s will not be kept"
        )
        log.warn(msg, spec.key, self.build_directory, setup_base)
        return setup_base
    if os.path.isdir(dist_filename):
        setup_base = dist_filename
    else:
        if os.path.dirname(dist_filename) == setup_base:
            os.unlink(dist_filename)  # get it out of the tmp dir
        contents = os.listdir(setup_base)
        if len(contents) == 1:
            dist_filename = os.path.join(setup_base, contents[0])
            if os.path.isdir(dist_filename):
                # if the only thing there is a directory, move it instead
                setup_base = dist_filename
    ensure_directory(dst)
    shutil.move(setup_base, dst)
    return dst
def build_and_install(self, setup_script, setup_base):
    args = ['bdist_egg', '--dist-dir']

    dist_dir = tempfile.mkdtemp(
        prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
    )
    try:
        self._set_fetcher_options(os.path.dirname(setup_script))
        args.append(dist_dir)

        self.run_setup(setup_script, setup_base, args)
        all_eggs = Environment([dist_dir])
        eggs = []
        for key in all_eggs:
            for dist in all_eggs[key]:
                eggs.append(self.install_egg(dist.location, setup_base))
        if not eggs and not self.dry_run:
            log.warn("No eggs found in %s (setup script problem?)",
                     dist_dir)
        return eggs
    finally:
        rmtree(dist_dir)
        log.set_verbosity(self.verbose)  # restore our log verbosity
def byte_compile(self, to_compile):
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return

    from distutils.util import byte_compile

    try:
        # try to make the byte compile messages quieter
        log.set_verbosity(self.verbose - 1)

        byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
        if self.optimize:
            byte_compile(
                to_compile, optimize=self.optimize, force=1,
                dry_run=self.dry_run,
            )
    finally:
        log.set_verbosity(self.verbose)  # restore original verbosity
def set_package(fxn):
    """Set __package__ on the returned module.

    This function is deprecated.
    """
    @functools.wraps(fxn)
    def set_package_wrapper(*args, **kwargs):
        warnings.warn('The import system now takes care of this automatically.',
                      DeprecationWarning, stacklevel=2)
        module = fxn(*args, **kwargs)
        if getattr(module, '__package__', None) is None:
            module.__package__ = module.__name__
            if not hasattr(module, '__path__'):
                module.__package__ = module.__package__.rpartition('.')[0]
        return module
    return set_package_wrapper
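The set_package() example above shows the common deprecation-wrapper pattern: the warning is emitted inside the wrapper with stacklevel=2 so it points at the deprecated API's caller. A minimal sketch (illustrative only, not taken from importlib; deprecated_add is a made-up helper) of how such a warning can be checked with warnings.catch_warnings:

import warnings

def deprecated_add(a, b):
    # Hypothetical deprecated helper, used only for this illustration.
    warnings.warn("deprecated_add() is deprecated; use the + operator",
                  DeprecationWarning, stacklevel=2)
    return a + b

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")  # ensure the warning is not suppressed
    result = deprecated_add(1, 2)

assert result == 3
assert any(issubclass(w.category, DeprecationWarning) for w in caught)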
def splitunc(p):
    """Deprecated since Python 3.1.  Please use splitdrive() instead;
    it now handles UNC paths.

    Split a pathname into UNC mount point and relative path specifiers.
    Return a 2-tuple (unc, rest); either part may be empty.
    If unc is not empty, it has the form '//host/mount' (or similar
    using backslashes).  unc+rest is always the input path.
    Paths containing drive letters never have a UNC part.
    """
    import warnings
    warnings.warn("ntpath.splitunc is deprecated, use ntpath.splitdrive instead",
                  DeprecationWarning, 2)
    drive, path = splitdrive(p)
    if len(drive) == 2:
        # Drive letter present
        return p[:0], p
    return drive, path


# Split a path in head (everything up to the last '/') and tail (the
# rest).  After the trailing '/' is stripped, the invariant
# join(head, tail) == p holds.
# The resulting head won't end in '/' unless it is the root.
def __init__(*args, **kwargs):
    if not args:
        raise TypeError("descriptor '__init__' of 'UserDict' object "
                        "needs an argument")
    self, *args = args
    if len(args) > 1:
        raise TypeError('expected at most 1 arguments, got %d' % len(args))
    if args:
        dict = args[0]
    elif 'dict' in kwargs:
        dict = kwargs.pop('dict')
        import warnings
        warnings.warn("Passing 'dict' as keyword argument is deprecated",
                      DeprecationWarning, stacklevel=2)
    else:
        dict = None
    self.data = {}
    if dict is not None:
        self.update(dict)
    if len(kwargs):
        self.update(kwargs)
def material_vector(self, mat_parameter):
    """Get a vector that contains the specified material parameter for
    every point of the field.

    Args:
        mat_parameter: Material parameter of interest.

    Returns:
        Vector which contains the specified material parameter for each
        point in the field.
    """
    param_found = False
    mat_vector = np.zeros(self.num_points)
    for mat_reg in self.material_regions:
        for mat in mat_reg.materials:
            if hasattr(mat, mat_parameter):
                mat_vector[mat_reg.region.indices] = getattr(mat, mat_parameter)
                param_found = True
    if not param_found:
        wn.warn('Material parameter {} not found in set materials. Returning zeros.'
                .format(mat_parameter), stacklevel=2)
    return mat_vector
def _process_tRNS(self, data):
    # http://www.w3.org/TR/PNG/#11tRNS
    self.trns = data
    if self.colormap:
        if not self.plte:
            warnings.warn("PLTE chunk is required before tRNS chunk.")
        else:
            if len(data) > len(self.plte)/3:
                # Was warning, but promoted to Error as it
                # would otherwise cause pain later on.
                raise FormatError("tRNS chunk is too long.")
    else:
        if self.alpha:
            raise FormatError(
                "tRNS chunk is not valid with colour type %d." %
                self.color_type)
        try:
            self.transparent = \
                struct.unpack("!%dH" % self.color_planes, data)
        except struct.error:
            raise FormatError("tRNS chunk has incorrect length.")
def get_key(dotenv_path, key_to_get):
    """
    Gets the value of a given key from the given .env

    If the .env path given doesn't exist, fails
    """
    key_to_get = str(key_to_get)
    if not os.path.exists(dotenv_path):
        warnings.warn("can't read %s - it doesn't exist." % dotenv_path)
        return None
    dotenv_as_dict = dotenv_values(dotenv_path)
    if key_to_get in dotenv_as_dict:
        return dotenv_as_dict[key_to_get]
    else:
        warnings.warn("key %s not found in %s." % (key_to_get, dotenv_path))
        return None
def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"):
    """
    Adds or Updates a key/value to the given .env

    If the .env path given doesn't exist, fails instead of risking creating
    an orphan .env somewhere in the filesystem
    """
    key_to_set = str(key_to_set)
    value_to_set = str(value_to_set).strip("'").strip('"')
    if not os.path.exists(dotenv_path):
        warnings.warn("can't write to %s - it doesn't exist." % dotenv_path)
        return None, key_to_set, value_to_set
    dotenv_as_dict = OrderedDict(parse_dotenv(dotenv_path))
    dotenv_as_dict[key_to_set] = value_to_set
    success = flatten_and_write(dotenv_path, dotenv_as_dict, quote_mode)
    return success, key_to_set, value_to_set
def unset_key(dotenv_path, key_to_unset, quote_mode="always"):
    """
    Removes a given key from the given .env

    If the .env path given doesn't exist, fails
    If the given key doesn't exist in the .env, fails
    """
    key_to_unset = str(key_to_unset)
    if not os.path.exists(dotenv_path):
        warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path)
        return None, key_to_unset
    dotenv_as_dict = dotenv_values(dotenv_path)
    if key_to_unset in dotenv_as_dict:
        dotenv_as_dict.pop(key_to_unset, None)
    else:
        warnings.warn("key %s not removed from %s - key doesn't exist." %
                      (key_to_unset, dotenv_path))
        return None, key_to_unset
    success = flatten_and_write(dotenv_path, dotenv_as_dict, quote_mode)
    return success, key_to_unset
def __init__(self, headers=None, retries=None, validate_certificate=True,
             urlfetch_retries=True):
    if not urlfetch:
        raise AppEnginePlatformError(
            "URLFetch is not available in this environment.")
    if is_prod_appengine_mvms():
        raise AppEnginePlatformError(
            "Use normal urllib3.PoolManager instead of AppEngineManager "
            "on Managed VMs, as using URLFetch is not necessary in "
            "this environment.")

    warnings.warn(
        "urllib3 is using URLFetch on Google App Engine sandbox instead "
        "of sockets. To use sockets directly instead of URLFetch see "
        "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
        AppEnginePlatformWarning)

    RequestMethods.__init__(self, headers)
    self.validate_certificate = validate_certificate
    self.urlfetch_retries = urlfetch_retries
    self.retries = retries or Retry.DEFAULT
def _validate_conn(self, conn):
    """
    Called right before a request is made, after the socket is created.
    """
    super(HTTPSConnectionPool, self)._validate_conn(conn)

    # Force connect early to allow us to validate the connection.
    if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
        conn.connect()

    if not conn.is_verified:
        warnings.warn((
            'Unverified HTTPS request is being made. '
            'Adding certificate verification is strongly advised. See: '
            'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
            '#ssl-warnings'),
            InsecureRequestWarning)
def wrap_socket(self, socket, server_hostname=None, server_side=False):
    warnings.warn(
        'A true SSLContext object is not available. This prevents '
        'urllib3 from configuring SSL appropriately and may cause '
        'certain SSL connections to fail. You can upgrade to a newer '
        'version of Python to solve this. For more information, see '
        'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
        '#ssl-warnings',
        InsecurePlatformWarning
    )
    kwargs = {
        'keyfile': self.keyfile,
        'certfile': self.certfile,
        'ca_certs': self.ca_certs,
        'cert_reqs': self.verify_mode,
        'ssl_version': self.protocol,
        'server_side': server_side,
    }
    if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
        return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
    else:  # Platform-specific: Python 2.6
        return wrap_socket(socket, **kwargs)
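Several of the urllib3 examples above emit library-specific warning categories (AppEnginePlatformWarning, InsecureRequestWarning, InsecurePlatformWarning). Consumers usually silence these selectively with the warnings filter rather than suppressing all warnings; a short sketch, assuming urllib3 is installed:

import warnings
import urllib3

# Ignore only the certificate-verification warning category shown above,
# leaving every other warning (including other urllib3 categories) enabled.
warnings.filterwarnings("ignore", category=urllib3.exceptions.InsecureRequestWarning)

http = urllib3.PoolManager(cert_reqs="CERT_NONE")
# Requests made through `http` against hosts with unverified certificates
# will no longer emit InsecureRequestWarning.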