The following 50 code examples, extracted from open source Python projects, demonstrate how to use hashlib.algorithms().
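For reference, a minimal sketch of the attribute itself (ours, not taken from any project below): hashlib.algorithms exists only on Python 2.7, while Python 3 replaced it with hashlib.algorithms_guaranteed and hashlib.algorithms_available, so portable code usually falls back between the two.

import hashlib

# hashlib.algorithms is a Python 2.7-only tuple of always-available algorithm
# names; Python 3 exposes algorithms_guaranteed / algorithms_available instead.
try:
    names = hashlib.algorithms              # Python 2.7
except AttributeError:
    names = hashlib.algorithms_guaranteed   # Python 3

print(sorted(names))
print(hashlib.new('md5', b'demo').hexdigest())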
def to_native(self, value, context=None):
    value = super(HashType, self).to_native(value, context)

    if ':' not in value:
        raise ValidationError(self.messages['hash_invalid'])

    hash_type, hash_value = value.split(':', 1)
    if hash_type not in algorithms:
        raise ValidationError(self.messages['hash_invalid'])
    if len(hash_value) != hash_new(hash_type).digest_size * 2:
        raise ValidationError(self.messages['hash_length'])
    try:
        int(hash_value, 16)
    except ValueError:
        raise ConversionError(self.messages['hash_hex'])

    return value
def _generate_hashes(self):
    if self._buffer is None:
        raise Exception('No buffer provided, nothing to do')

    algos = list(hashlib.algorithms)
    for a in algorithm.__subclasses__():
        algos.append(a.__name__)

    for module in algos:
        """To support validation within each algorithm's module, we wrap an
        existing implementation and name it with the 'hashdd_' prefix to avoid
        conflicts. We're not going to strip this prefix until the very last
        moment, as common hashes like md5 may be used in a variety of places.
        """
        if self._algorithms is not None and module not in self._algorithms:
            # Skip modules that are not expressly enabled
            continue

        if module.startswith('hashdd_'):
            m = getattr(hashlib, module)
            if m.prefilter(self._buffer):
                # Store the digest under the name with 'hashdd_' stripped off
                setattr(self, module[7:], m(self._buffer).hexdigest())
def get_fingerprint(path, passphrase=None):
    """Generate the fingerprint of the public key."""
    fingerprint = {}
    privatekey = load_privatekey(path, passphrase)
    try:
        publickey = crypto.dump_publickey(crypto.FILETYPE_ASN1, privatekey)
        for algo in hashlib.algorithms:
            f = getattr(hashlib, algo)
            pubkey_digest = f(publickey).hexdigest()
            fingerprint[algo] = ':'.join(pubkey_digest[i:i + 2]
                                         for i in range(0, len(pubkey_digest), 2))
    except AttributeError:
        # If PyOpenSSL < 16.0, crypto.dump_publickey() will fail.
        # By doing this we prevent the code from raising an error
        # yet we return no value in the fingerprint hash.
        pass
    return fingerprint
def test_algorithms_attribute(self):
    self.assertEqual(
        hashlib.algorithms,
        tuple([_algo for _algo in self.supported_hash_names
               if _algo.islower()]))
def _get_hash_object(hash_algo_name):
    """Create a hash object based on given algorithm.

    :param hash_algo_name: name of the hashing algorithm.
    :raises: InvalidInputError, on unsupported or invalid input.
    :returns: a hash object based on the given named algorithm.
    """
    algorithms = (hashlib.algorithms_guaranteed if six.PY3
                  else hashlib.algorithms)
    if hash_algo_name not in algorithms:
        msg = ("Unsupported/Invalid hash name '%s' provided."
               % hash_algo_name)
        raise exception.InvalidInputError(msg)

    return getattr(hashlib, hash_algo_name)()
def hash_file(file_like_object, hash_algo='md5'):
    """Generate a hash for the contents of a file.

    It returns a hash of the file object as a string of double length,
    containing only hexadecimal digits. It supports all the algorithms
    hashlib does.

    :param file_like_object: file like object whose hash is to be calculated.
    :param hash_algo: name of the hashing strategy, default being 'md5'.
    :raises: InvalidInputError, on unsupported or invalid input.
    :returns: a condensed digest of the bytes of contents.
    """
    checksum = _get_hash_object(hash_algo)
    for chunk in iter(lambda: file_like_object.read(32768), b''):
        checksum.update(chunk)
    return checksum.hexdigest()
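A minimal usage sketch for hash_file() above, assuming it and _get_hash_object() live in the same importable module (the name checksum_utils is hypothetical); the digest is computed over an in-memory file object.

import io

# Hypothetical import path -- substitute the module that actually defines hash_file().
from checksum_utils import hash_file

digest = hash_file(io.BytesIO(b'hello world'), hash_algo='sha256')
print(digest)  # hex digest of b'hello world'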
def crack(hashstr, wordlist):
    for word in wordlist:
        for hashtype in hashlib.algorithms:
            func = getattr(hashlib, hashtype)
            if func(word).hexdigest().lower() == hashstr.lower():
                return word, hashtype
    return None, None
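A small invocation sketch for crack() above (Python 2, where hashlib accepts str input); the wordlist is made up and the target is the well-known MD5 of 'password'.

# Hypothetical call; on Python 3 the candidate words would need to be bytes.
target = '5f4dcc3b5aa765d61d8327deb882cf99'  # md5 of 'password'
word, algo = crack(target, ['letmein', 'password', 'hunter2'])
print(word, algo)  # -> password md5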
def _get_hash_types():
    hash_types = OrderedDict()

    try:
        algorithms = hashlib.algorithms_available
    except AttributeError:
        algorithms = hashlib.algorithms

    if 'md5' in algorithms:
        hash_types['MD5'] = hashlib.md5
    if 'sha1' in algorithms:
        hash_types['SHA1'] = hashlib.sha1
    if 'sha256' in algorithms:
        hash_types['SHA256'] = hashlib.sha256
    if 'sha512' in algorithms:
        hash_types['SHA512'] = hashlib.sha512

    hash_types['BLAKE2b_256'] = partial(BLAKE2b.new, digest_bits=256)
    hash_types['BLAKE2b_512'] = partial(BLAKE2b.new, digest_bits=512)
    hash_types['RIPEMD160'] = RIPEMD160.new

    # The ones from hashlib are faster
    # hash_types['MD5'] = MD5.new
    # hash_types['SHA1'] = SHA1.new
    # hash_types['SHA256'] = SHA256.new
    # hash_types['SHA512'] = SHA512.new

    hash_types['SHA3_256'] = SHA3_256.new
    hash_types['SHA3_512'] = SHA3_512.new
    hash_types['keccak_256'] = partial(keccak.new, digest_bits=256)
    hash_types['keccak_512'] = partial(keccak.new, digest_bits=512)

    return hash_types
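A usage sketch for _get_hash_types() above, assuming the enclosing module's imports (OrderedDict, functools.partial, and the pycryptodome BLAKE2b/RIPEMD160/SHA3/keccak modules) are present, since the function references them when building the table.

# Pick a constructor by display name and hash some bytes.
hash_types = _get_hash_types()
h = hash_types['SHA256']        # hashlib.sha256 when the platform provides it
print(h(b'demo').hexdigest())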
def install(self, source, dest=None, checksum=None, hash_type='sha1'):
    """
    Download and install an archive file, with optional checksum validation.

    The checksum can also be given on the `source` URL's fragment.
    For example::

        handler.install('http://example.com/file.tgz#sha1=deadbeef')

    :param str source: URL pointing to an archive file.
    :param str dest: Local destination path to install to. If not given,
        installs to `$CHARM_DIR/archives/archive_file_name`.
    :param str checksum: If given, validate the archive file after download.
    :param str hash_type: Algorithm used to generate `checksum`.
        Can be any hash algorithm supported by :mod:`hashlib`,
        such as md5, sha1, sha256, sha512, etc.
    """
    url_parts = self.parse_url(source)
    dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
    if not os.path.exists(dest_dir):
        mkdir(dest_dir, perms=0o755)
    dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
    try:
        self.download(source, dld_file)
    except URLError as e:
        raise UnhandledSource(e.reason)
    except OSError as e:
        raise UnhandledSource(e.strerror)
    options = parse_qs(url_parts.fragment)
    for key, value in options.items():
        if not six.PY3:
            algorithms = hashlib.algorithms
        else:
            algorithms = hashlib.algorithms_available
        if key in algorithms:
            if len(value) != 1:
                raise TypeError(
                    "Expected 1 hash value, not %d" % len(value))
            expected = value[0]
            check_hash(dld_file, expected, key)
    if checksum:
        check_hash(dld_file, checksum, hash_type)
    return extract(dld_file, dest)
def get_hash(infile, algorithm='md5', BLOCKSIZE=65536):
    """Generate file hash without reading in the entire file at once.

    Original code licensed under MIT. Source:
    http://pythoncentral.io/hashing-files-with-python/

    Parameters
    ----------
    infile : str
        File of interest (including the path).
    algorithm : str (optional)
        Hash algorithm of choice. Defaults to 'md5'.
    BLOCKSIZE : int (optional)
        How much data in bytes to read in at once.

    Returns
    -------
    hash : str
        The hash of the file.

    Examples
    --------
    >>> import yt.funcs as funcs
    >>> funcs.get_hash('/path/to/test.png')
    'd38da04859093d430fa4084fd605de60'
    """
    import hashlib

    try:
        hasher = getattr(hashlib, algorithm)()
    except AttributeError:
        raise NotImplementedError(
            "'%s' not available! Available algorithms: %s" %
            (algorithm, hashlib.algorithms))

    filesize = os.path.getsize(infile)
    iterations = int(float(filesize) / float(BLOCKSIZE))

    pbar = get_pbar('Generating %s hash' % algorithm, iterations)

    iter = 0
    with open(infile, 'rb') as f:
        buf = f.read(BLOCKSIZE)
        while len(buf) > 0:
            hasher.update(buf)
            buf = f.read(BLOCKSIZE)
            iter += 1
            pbar.update(iter)
    pbar.finish()

    return hasher.hexdigest()
def __init__(self, *args, **kwargs):
    algorithms = set()
    for algorithm in self.supported_hash_names:
        algorithms.add(algorithm.lower())
    self.constructors_to_test = {}
    for algorithm in algorithms:
        self.constructors_to_test[algorithm] = set()

    # For each algorithm, test the direct constructor and the use
    # of hashlib.new given the algorithm name.
    for algorithm, constructors in self.constructors_to_test.items():
        constructors.add(getattr(hashlib, algorithm))

        def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
            if data is None:
                return hashlib.new(_alg)
            return hashlib.new(_alg, data)
        constructors.add(_test_algorithm_via_hashlib_new)

    _hashlib = self._conditional_import_module('_hashlib')
    if _hashlib:
        # These two algorithms should always be present when this module
        # is compiled. If not, something was compiled wrong.
        assert hasattr(_hashlib, 'openssl_md5')
        assert hasattr(_hashlib, 'openssl_sha1')
        for algorithm, constructors in self.constructors_to_test.items():
            constructor = getattr(_hashlib, 'openssl_' + algorithm, None)
            if constructor:
                constructors.add(constructor)

    _md5 = self._conditional_import_module('_md5')
    if _md5:
        self.constructors_to_test['md5'].add(_md5.new)
    _sha = self._conditional_import_module('_sha')
    if _sha:
        self.constructors_to_test['sha1'].add(_sha.new)
    _sha256 = self._conditional_import_module('_sha256')
    if _sha256:
        self.constructors_to_test['sha224'].add(_sha256.sha224)
        self.constructors_to_test['sha256'].add(_sha256.sha256)
    _sha512 = self._conditional_import_module('_sha512')
    if _sha512:
        self.constructors_to_test['sha384'].add(_sha512.sha384)
        self.constructors_to_test['sha512'].add(_sha512.sha512)

    super(HashLibTestCase, self).__init__(*args, **kwargs)