我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用hashlib.algorithms_available(注意:algorithms_available 是一个集合属性,而非函数,不需要加括号调用)。
def hasher(data, algos=ALGOS):
    """Hash *data* with every requested algorithm.

    :param data: ``str`` or ``bytes`` payload; ``str`` is encoded (UTF-8)
        before hashing, since hashlib requires bytes.
    :param algos: iterable of acceptable algorithm names.
    :return: dict mapping algorithm name -> hex digest, restricted to the
        names in *algos* that this hashlib build supports.
    """
    # Explicit type test replaces the original blanket
    # `try: data.encode() except Exception: pass`, which also
    # swallowed genuine encoding errors.
    if isinstance(data, str):
        data = data.encode()
    result = {}
    # sorted() gives a stable, reproducible iteration order over the set.
    for algo in sorted(hashlib.algorithms_available):
        if algo in algos:
            h = hashlib.new(algo)
            h.update(data)
            result[algo] = h.hexdigest()
    return result
def _get_digest_algorithm(name=None):
    """Resolve a digest constructor from :mod:`hashlib`.

    :param name: name of algorithm to use; when omitted, the first entry of
        ``settings.ALLOWED_DIGEST_ALGORITHMS`` that hashlib supports wins.
    :type name: str
    :return: digest algorithm constructor
    :rtype: class
    :raises exceptions.AlgorithmNotSupported: when nothing usable is found.
    """
    if name:
        candidates = [name.lower()]
    else:
        candidates = [a for a in (s.lower() for s in settings.ALLOWED_DIGEST_ALGORITHMS)
                      if a in hashlib.algorithms_available]
    for candidate in candidates:
        constructor = getattr(hashlib, candidate, None)
        if constructor is not None:
            return constructor
    logger.error('No algorithm from %r found in hashlib %r',
                 settings.ALLOWED_DIGEST_ALGORITHMS, hashlib.algorithms_available)
    raise exceptions.AlgorithmNotSupported('No suitable algorithm found.')
def try_all_algorithms(self):
    """Hash every wordlist entry with every algorithm hashlib offers.

    Populates ``self.results`` mapping hexdigest -> [word, algorithm name]
    for each (algorithm, word) pair, then checks the candidates against the
    target hash.

    :return: the value of ``self.verify_hashes()`` when verification
        succeeds, otherwise ``None`` (after a fatal log entry).
    """
    for alg in hashlib.algorithms_available:
        for word in self.words:
            candidate = word.strip()
            hasher = hashlib.new(alg)
            # hashlib.update() requires bytes on Python 3; encode str input.
            hasher.update(candidate if isinstance(candidate, bytes) else candidate.encode())
            self.results[hasher.hexdigest()] = [candidate, alg]
    LOGGER.info("Created %i hashes, verifying against given hash (%s)" % (len(self.results), self.hash))
    # Call verify_hashes() once and reuse the result; the original called
    # it twice (once for the check, once for the return value).
    verified = self.verify_hashes()
    if verified is False:
        LOGGER.fatal("Unable to verify hash: %s" % self.hash)
        return None
    return verified
def bruteforce(): out = raw_input('hash to crack> ') print 'algorithms supported: '+', '.join(hashlib.algorithms_available) algo = raw_input('algorithm to use> ') if not algo in hashlib.algorithms_available: print 'invalid algorithm' return fn = raw_input('path to password list> ').strip() def check(pw): h = hashlib.new(algo) h.update(pw) if h.hexdigest() == out: return True return False zc = zippycrack(check,fn,numthreads=8) print zc.run()
def pubkey_to_address(pubkey: bytes) -> str:
    """Derive a Base58Check-encoded address from a public key.

    The key is hashed SHA-256 then RIPEMD-160, prefixed with the 0x00
    version byte, and Base58Check-encoded.
    """
    if 'ripemd160' not in hashlib.algorithms_available:
        raise RuntimeError('missing ripemd160 hash algorithm')
    digest = hashlib.new('ripemd160', hashlib.sha256(pubkey).digest()).digest()
    return b58encode_check(b'\x00' + digest)
def make_digesters(fpath, families, include_CRCs=False): """ Create and return a dictionary of all our active hash algorithms. Each digester is a 2-tuple ``( digester.update_func(bytes), digest_func(digester) -> int)``. """ ## TODO: simplify digester-tuple API, ie: (digester, update_func(d), digest_func(d)) families = set(f.upper() for f in families) digesters = OrderedDict() digesters['LENGTH'] = (LenDigester(), LenDigester.digest) # Default Algos for algo in sorted(hashlib.algorithms_available): # algorithms_available can have duplicates aname = algo.upper() if aname not in digesters and is_algo_in_families(aname, families): digesters[aname] = (hashlib.new(algo), lambda d: d.hexdigest()) # CRC if include_CRCs: for name in sorted(crcmod._crc_definitions_by_name): crc_name = crcmod._crc_definitions_by_name[name]['name'] aname = crc_name.upper() if is_algo_in_families(aname, families): digesters[aname] = (crcmod.PredefinedCrc(crc_name), lambda d: hex(d.crcValue)) add_git_digesters(digesters, fpath) ## Append plugin digesters. # digesters.update(known_digesters) for digester in list(digesters.keys()): if not is_algo_in_families(digester.upper(), families): digesters.pop(digester, None) return digesters
def test_algorithms_available(self): self.assertTrue(set(hashlib.algorithms_guaranteed). issubset(hashlib.algorithms_available))
def __get_algorithms():
    """Return available algorithms from hashlib."""
    # Typical contents (varies with the underlying OpenSSL build), e.g.:
    #   {'SHA1', 'MDC2', 'SHA', 'SHA384', 'ecdsa-with-SHA1', 'SHA256',
    #    'SHA512', 'md4', 'md5', 'sha1', 'dsaWithSHA', 'DSA-SHA', 'sha',
    #    'sha224', 'dsaEncryption', 'DSA', 'ripemd160', 'mdc2', 'MD5',
    #    'MD4', 'sha384', 'SHA224', 'sha256', 'sha512', 'RIPEMD160'}
    return hashlib.algorithms_available
def compute_checksum(filename, hashtype):
    """Compute a checksum of *filename* with the requested algorithm.

    :param filename: path to the file to hash.
    :param hashtype: "adler32"/"crc32" (zlib rolling checksums) or any name
        accepted by ``hashlib.new``.
    :return: lowercase hex digest string, or None when the file does not
        exist or the hash type is unknown.
    """
    file = encode(filename)
    if not exists(file):
        return None
    buf = fsbsize(filename)
    if hashtype in ("adler32", "crc32"):
        hf = getattr(zlib, hashtype)
        last = 0
        # Sentinel must be b'' — the file is opened binary, so read()
        # yields bytes; the original '' sentinel never matches on
        # Python 3 and the loop never terminates.
        with open(file, "rb") as f:
            for chunk in iter(lambda: f.read(buf), b''):
                last = hf(chunk, last)
        return "%x" % last
    elif hashtype in hashlib.algorithms_available:
        h = hashlib.new(hashtype)
        with open(file, "rb") as f:
            for chunk in iter(lambda: f.read(buf * h.block_size), b''):
                h.update(chunk)
        return h.hexdigest()
    else:
        return None
def hash_cmd(bot, event, irc, args):
    """<hashing mechanism> <string to hash> Returns a hexadecimal digest of the hashed provided string"""
    if args[0] in hashlib.algorithms_available:
        # hashlib.new() accepts every name in algorithms_available; the
        # original getattr(hashlib, name) raised AttributeError for
        # algorithms (e.g. 'ripemd160') that have no module-level
        # constructor even though they pass the membership check.
        digest = hashlib.new(args[0], " ".join(args[1:]).encode()).hexdigest()
        irc.reply(event, digest)
    else:
        irc.reply(event, "Unknown hashing mechanism: " + args[0])
def get_file_hash(path, algorithm='md5'):
    """Get hash from a given file and hashing algorithm.

    Args:
        path: str. Full path to a file.
        algorithm: str, optional. Name of hashing algorithm. See
            `hashlib.algorithms_available` for list of available hashing
            algorithms in python.

    Returns:
        str. File hash computed from the file using the specified algorithm.

    #### Examples
    ```python
    get_file_hash('train.gz')
    ## '5503d900f6902c61682e6b6f408202cb'
    ```
    """
    hash_alg = hashlib.new(algorithm)
    # Read in 4 MiB chunks so large files never have to fit in memory.
    # (Source had the garbled expression `1024 1024 4`, a SyntaxError;
    # `1024 * 1024 * 4` restores the intended chunk size.)
    read_size = 1024 * 1024 * 4
    with open(path, 'rb') as f:
        data = f.read(read_size)
        while data:
            hash_alg.update(data)
            data = f.read(read_size)
    return hash_alg.hexdigest()
def hashes_match(path, hash_true, algorithm='md5'):
    """Check whether the computed hash from the file matches the specified
    hash string.

    Args:
        path: str. Full path to a file.
        hash_true: str. True hash of the file.
        algorithm: str, optional. Name of hashing algorithm. See
            `hashlib.algorithms_available` for list of available hashing
            algorithms in python.

    Returns:
        bool. True if the hashes match, else False.
    """
    # Guard clause: a missing file can never match.
    if not os.path.exists(path):
        return False
    return get_file_hash(path, algorithm) == hash_true
def hashstring(): inp = raw_input('string to hash> ') algos = {} for i in hashlib.algorithms_available: algos[i] = hashlib.new(i) for i in algos.keys(): algos[i].update(inp) for i in algos.keys(): print '{} :\t{}'.format(i,algos[i].hexdigest())
def hashfile(): fn = raw_input('file path> ').strip() algos = {} for i in hashlib.algorithms_available: algos[i] = hashlib.new(i) fd = open(fn,'rb') while 1: indata = fd.read(8192) for i in algos.keys(): algos[i].update(indata) if len(indata) < 8192: break for i in algos.keys(): print '{} :\t{}'.format(i,algos[i].hexdigest())
def __init__(self):
    """Refuse to construct the backend when hashlib lacks md5 support."""
    if 'md5' not in hashlib.algorithms_available:
        # RuntimeError is more specific than the bare Exception the
        # original raised, and is still caught by `except Exception`
        # callers; the "autentication" typo in the message is fixed.
        raise RuntimeError(
            'vb5_legacy authentication backend requires md5 support from '
            'hashlib or it will not run.')
def _get_hash_types():
    """Build an ordered mapping of display name -> hash constructor.

    hashlib supplies the classic algorithms when available; the remaining
    entries come from PyCryptodome primitives.
    """
    hash_types = OrderedDict()
    try:
        algorithms = hashlib.algorithms_available
    except AttributeError:
        # Older Pythons expose only the fixed `algorithms` tuple.
        algorithms = hashlib.algorithms
    for label, ctor in (('MD5', hashlib.md5),
                        ('SHA1', hashlib.sha1),
                        ('SHA256', hashlib.sha256),
                        ('SHA512', hashlib.sha512)):
        if label.lower() in algorithms:
            hash_types[label] = ctor
    hash_types['BLAKE2b_256'] = partial(BLAKE2b.new, digest_bits=256)
    hash_types['BLAKE2b_512'] = partial(BLAKE2b.new, digest_bits=512)
    hash_types['RIPEMD160'] = RIPEMD160.new
    # The ones from hashlib are faster
    # hash_types['MD5'] = MD5.new
    # hash_types['SHA1'] = SHA1.new
    # hash_types['SHA256'] = SHA256.new
    # hash_types['SHA512'] = SHA512.new
    hash_types['SHA3_256'] = SHA3_256.new
    hash_types['SHA3_512'] = SHA3_512.new
    hash_types['keccak_256'] = partial(keccak.new, digest_bits=256)
    hash_types['keccak_512'] = partial(keccak.new, digest_bits=512)
    return hash_types
def on_start(self):
    """Instantiate every functor and index them by name in self.functors."""
    candidates = [
        _Functor_ArgQuoteUnix(), _Functor_ArgQuoteWin(),
        _Functor_ArgSplitUnix(), _Functor_ArgSplitWin(),
        _Functor_CaseConversion(), _Functor_Keypirinha(),
        _Functor_RandBytes(), _Functor_RandPassword(),
        _Functor_RandUUID(), _Functor_Rot13(),
        _Functor_UrlQuote(), _Functor_UrlQuotePlus(),
        _Functor_UrlSplit(), _Functor_UrlUnquote(),
        _Functor_ZLib("adler32"), _Functor_ZLib("crc32")]
    available = hashlib.algorithms_available
    for algo in available:
        # hashlib may list an algorithm twice, like 'MD4' and 'md4';
        # keep only the uppercased spelling when both are present.
        upper = algo.upper()
        if upper != algo and upper in available:
            continue
        candidates.append(_Functor_Hashlib(algo))
    self.functors = {}
    for functor in candidates:
        if functor.name in self.functors:
            self.warn("functor declared twice:", functor.name)
        else:
            self.functors[functor.name] = functor
def install(self, source, dest=None, checksum=None, hash_type='sha1'):
    """
    Download and install an archive file, with optional checksum validation.

    The checksum can also be given on the `source` URL's fragment.
    For example::

        handler.install('http://example.com/file.tgz#sha1=deadbeef')

    :param str source: URL pointing to an archive file.
    :param str dest: Local destination path to install to. If not given,
        installs to `$CHARM_DIR/archives/archive_file_name`.
    :param str checksum: If given, validate the archive file after download.
    :param str hash_type: Algorithm used to generate `checksum`.
        Can be any hash algorithm supported by :mod:`hashlib`,
        such as md5, sha1, sha256, sha512, etc.
    """
    url_parts = self.parse_url(source)
    dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
    if not os.path.exists(dest_dir):
        mkdir(dest_dir, perms=0o755)
    dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
    # Wrap only the download: network errors and filesystem errors are
    # both surfaced to the caller as UnhandledSource.
    try:
        self.download(source, dld_file)
    except URLError as e:
        raise UnhandledSource(e.reason)
    except OSError as e:
        raise UnhandledSource(e.strerror)
    # Checksums may ride on the URL fragment as query-style pairs,
    # e.g. '#sha1=deadbeef'; each recognized algorithm key is verified.
    options = parse_qs(url_parts.fragment)
    for key, value in options.items():
        # Python 2's hashlib has no algorithms_available; fall back to the
        # fixed `algorithms` tuple there.
        if not six.PY3:
            algorithms = hashlib.algorithms
        else:
            algorithms = hashlib.algorithms_available
        if key in algorithms:
            # parse_qs values are lists; exactly one digest per algorithm.
            if len(value) != 1:
                raise TypeError(
                    "Expected 1 hash value, not %d" % len(value))
            expected = value[0]
            check_hash(dld_file, expected, key)
    # Explicit `checksum` argument is verified in addition to any
    # fragment-supplied checksums.
    if checksum:
        check_hash(dld_file, checksum, hash_type)
    return extract(dld_file, dest)
def main():
    """Ansible entry point: stream *url* to *dest*, optionally verifying a
    '<algorithm>:<checksum>' pair supplied via the `checksum` parameter."""
    module = AnsibleModule(
        argument_spec=dict(
            url=dict(required=True, type='str'),
            dest=dict(required=True, type='str'),
            checksum=dict(required=False, type='str', default=''),
            chunksize=dict(required=False, type='int',
                           default=DEFAULT_CHUNK_SIZE),
            validate_certs=dict(required=False, type='bool', default=True),
            client_cert=dict(required=False, type='str', default=''),
            client_key=dict(required=False, type='str', default='')
        ))

    url = module.params['url']
    dest = module.params['dest']
    checksum = module.params['checksum']
    chunksize = module.params['chunksize']
    validate = module.params['validate_certs']
    client_cert = module.params['client_cert']
    client_key = module.params['client_key']

    if client_cert:
        certs = (client_cert, client_key) if client_key else client_cert
    else:
        certs = None

    if checksum == '':
        hash_algo, checksum = None, None
    else:
        try:
            hash_algo, checksum = checksum.rsplit(':', 1)
        except ValueError:
            module.fail_json(msg='The checksum parameter has to be in format '
                                 '"<algorithm>:<checksum>"')
        checksum = checksum.lower()
        if not all(c in string.hexdigits for c in checksum):
            module.fail_json(msg='The checksum must be valid HEX number')
        # Validate the algorithm only when a checksum was actually given;
        # performing this check unconditionally would fail the module
        # whenever no checksum is requested (hash_algo is None then).
        if hash_algo not in hashlib.algorithms_available:
            module.fail_json(msg="%s checksums are not supported" % hash_algo)

    try:
        actual_checksum = stream_to_dest(
            url, dest, chunksize, hash_algo, verify=validate, certs=certs)
    except Exception as e:
        module.fail_json(msg=str(e))
    else:
        if hash_algo and actual_checksum != checksum:
            module.fail_json(msg='Invalid dest checksum')
        else:
            module.exit_json(changed=True)


# NOTE(pas-ha) Ansible's module_utils.basic is licensed under BSD (2 clause)
def verify_hashes(packages, archives, hash_alg='md5'):
    """
    Loop through all given package objects and compare the hashes with
    hashes in package archive.

    Any hash that is supported by Python's hashlib can be used for
    comparison. In the interest of speed, files are iterated in the order
    they appear in the archive. If they are large packages, they should be
    opened decompressed.

    packages and archives are assumed to be zippable.

    :returns: True when every archived file exists on disk with a matching
        digest; False on the first missing file or digest mismatch.
        (The original docstring claimed a tuple of hashes was returned,
        which did not match the code.)
    :raises ValueError: if *hash_alg* is not available in hashlib.
    """
    def chunked(stream, size=1024):
        # Yield non-empty blocks from *stream* until EOF.
        for block in iter(lambda: stream.read(size), b''):
            if block:
                yield block

    if hash_alg not in hashlib.algorithms_available:
        raise ValueError("{} hash algorithm not available in hashlib.".format(hash_alg))
    _new_hasher = lambda: hashlib.new(hash_alg)
    for pk, ar in zip(packages, archives):
        for tarinfo in ar:
            th, fh = _new_hasher(), _new_hasher()
            tfile = next(ar.extract(tarinfo, destination=None))
            fpath = os.path.join(pk.path, tarinfo.path)
            if not os.path.exists(fpath):
                return False
            for x in chunked(tfile):
                th.update(x)
            with open(fpath, 'rb') as fi:
                for x in chunked(fi):
                    fh.update(x)
            if th.digest() != fh.digest():
                # "Mismathed" typo in the original message fixed.
                print("Mismatched hash: {}".format(tarinfo.path))
                return False
    return True