We extracted the following 49 code examples from open source Python projects to illustrate how to use binascii.unhexlify().
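Before the project examples, a minimal standalone sketch of the basic hexlify/unhexlify round trip (standard library behaviour, not taken from any project below):

import binascii

# hexlify turns raw bytes into an ASCII hex bytes object; unhexlify reverses it.
raw = b"\x00\xffhello"
hex_repr = binascii.hexlify(raw)              # b'00ff68656c6c6f'
assert binascii.unhexlify(hex_repr) == raw

# unhexlify also accepts an ASCII str, but the input must have an even number
# of hex digits, otherwise binascii.Error is raised.
assert binascii.unhexlify("414243") == b"ABC"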
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    """
    fingerprint = fingerprint.replace(':', '').lower()
    digest_length = len(fingerprint)
    hashfunc = HASHFUNC_MAP.get(digest_length)
    if not hashfunc:
        raise SSLError(
            'Fingerprint of invalid length: {0}'.format(fingerprint))

    # We need encode() here for py32; works on py2 and p33.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    cert_digest = hashfunc(cert).digest()

    if not _const_compare_digest(cert_digest, fingerprint_bytes):
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(fingerprint, hexlify(cert_digest)))
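The colon-stripping plus unhexlify step above, shown in isolation (hypothetical, truncated fingerprint; not from the project's test suite):

import binascii

fingerprint = "9F:86:D0:81"              # hypothetical prefix, for illustration only
stripped = fingerprint.replace(":", "").lower()
assert binascii.unhexlify(stripped) == b"\x9f\x86\xd0\x81"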
def b16decode(s, casefold=False): """Decode the Base16 encoded bytes-like object or ASCII string s. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. The result is returned as a bytes object. A binascii.Error is raised if s is incorrectly padded or if there are non-alphabet characters present in the input. """ s = _bytes_from_decode_data(s) if casefold: s = s.upper() if re.search(b'[^0-9A-F]', s): raise binascii.Error('Non-base16 digit found') return binascii.unhexlify(s) # # Ascii85 encoding/decoding #
def b16decode(s, casefold=False): """Decode a Base16 encoded string. s is the string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. The decoded string is returned. A TypeError is raised if s were incorrectly padded or if there are non-alphabet characters present in the string. """ if casefold: s = s.upper() if re.search('[^0-9A-F]', s): raise TypeError('Non-base16 digit found') return binascii.unhexlify(s) # Legacy interface. This code could be cleaned up since I don't believe # binascii has any line length limitations. It just doesn't seem worth it # though.
def gen_solution_hash(hm1, v1, r1, s1, hm2, s2, destination):
    # Create a string of hex characters with zero padding where necessary.
    buf = b""
    buf += int_to_hex_str(hm1)
    buf += int_to_hex_str(v1)
    buf += int_to_hex_str(r1)
    buf += int_to_hex_str(s1)
    buf += int_to_hex_str(hm2)
    buf += int_to_hex_str(s2)

    # Convert ethereum address to aligned hex data.
    # It's already in hex so this is easy.
    dest = destination[2:]
    if len(dest) % 2:
        dest = "0" + dest
    buf += dest

    # Convert hex string to bytes and hash it.
    solution_hash = sha3(unhexlify(buf)).hexdigest()

    # Return the solution hash as hex.
    return solution_hash

# Generate message hashes.
def exploit():
    global verbose, packet, webshell
    t3_handshake()
    update_payload()
    update_length()
    if webshell:
        print '[INFO] Deploying webshell\n'
    #if verbose:
    #    print '[INFO] Sending packet:\n'+packet+'\n'
    try:
        sock.send(binascii.unhexlify(packet))
    except Exception as e:
        if e.args[1] == 'Broken pipe':
            print '[ERROR] Broken pipe error. Is backend ssl enabled ?\n'
            exit()
        elif e.args[1] == 'No route to host':
            print '[ERROR] No route to host. Do you know what you\'re doing ?'
            exit()
    print '[INFO] Malicious packet sent\n'
    sock.close()
def testDirAndFile(self):
    """Test hashing a directory with one file.

    The hash sum should stay stable in the long run as this might be used
    for binary artifact matching in the future.
    """
    with TemporaryDirectory() as tmp:
        os.mkdir(os.path.join(tmp, "dir"))
        with open(os.path.join(tmp, "dir", "file"), 'wb') as f:
            f.write(b'abc')

        sum1 = hashDirectory(tmp)
        assert len(sum1) == 20
        assert sum1 == binascii.unhexlify(
            "640f516de78fba0b6d2ddde4451000f142d06b0d")

        sum2 = hashDirectory(tmp)
        assert sum1 == sum2
def _restore_state(self):
    """Restore user state."""
    try:
        state = self._state_store.get_value(self._state_store_key)
        state_dict = pickle.loads(
            binascii.unhexlify(state.encode("utf-8")))
        self._name = state_dict['name']
        self.enrollment_secret = state_dict['enrollment_secret']
        enrollment = state_dict['enrollment']
        if enrollment:
            private_key = serialization.load_pem_private_key(
                enrollment['private_key'],
                password=None,
                backend=default_backend()
            )
            cert = enrollment['cert']
            self.enrollment = Enrollment(private_key, cert)
        self.affiliation = state_dict['affiliation']
        self.account = state_dict['account']
        self.roles = state_dict['roles']
        self._org = state_dict['org']
        self.msp_id = state_dict['msp_id']
    except Exception as e:
        raise IOError("Cannot deserialize the user", e)
def generate_keys(d, n):
    '''Prints out the 6.x and 7.x keys generated by using an exponent/modulus.'''
    # Generate ASN.1 data
    asn1 = asn1_prefix + hashlib.sha256(binascii.unhexlify(keydata)).hexdigest()
    asn1 = int(asn1, 16)

    if type(d) == str:
        d = int(d, 16)
    if type(n) == str:
        n = int(n, 16)

    # Exponentiate it.
    keys = hashlib.sha256(binascii.unhexlify('%0512X' % pow(asn1, d, n))).hexdigest().upper()

    print('6.X 0x2F KeyY: %s' % keys[:0x20])
    print('7.x 0x25 KeyX: %s' % keys[0x20:])
def get_data_from_http_header(self, buf):
    ret_buf = b''
    lines = buf.split(b'\r\n')
    if lines and len(lines) > 1:
        hex_items = lines[0].split(b'%')
        if hex_items and len(hex_items) > 1:
            for index in range(1, len(hex_items)):
                if len(hex_items[index]) < 2:
                    ret_buf += binascii.unhexlify('0' + hex_items[index])
                    break
                elif len(hex_items[index]) > 2:
                    ret_buf += binascii.unhexlify(hex_items[index][:2])
                    break
                else:
                    ret_buf += binascii.unhexlify(hex_items[index])
            return ret_buf
    return b''
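The branch for fragments shorter than two characters works because unhexlify requires an even number of hex digits. A short illustration of that padding trick (my own example, not project code):

import binascii

item = b"f"                                        # odd-length hex fragment
# binascii.unhexlify(item) would raise binascii.Error on odd-length input
assert binascii.unhexlify(b"0" + item) == b"\x0f"
assert binascii.unhexlify(b"2f6162"[:2]) == b"/"   # keep only the first full byte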
def decodeControllerString(cont_str):
    '''
    Decode an encoded string received via the control port.
    Decodes hexadecimal, and the Tor Control Protocol QuotedString format,
    depending on the format of the input string. Does not support the
    CString encoding, or raw strings that aren't in one of the two
    supported formats. Throws TypeError when presented with an invalid
    format.

    Only some strings in the tor control protocol need encoding.
    The same encodings are used by tor and the controller.
    '''
    cont_str = cont_str.strip()
    if (cont_str.startswith("\"") and cont_str.endswith("\"")
            and len(cont_str) >= 2):
        # quoted strings escape \ and " with \, then quote with "
        # this is safe, because we check the string is "*"
        cont_str = cont_str[1:-1]
        # the order of these replacements is important: they ensure that
        # \\" becomes \" rather than "
        cont_str = cont_str.replace("\\\"", "\"")
        return cont_str.replace("\\\\", "\\")
    else:
        # assume hex, throws TypeError on invalid hex
        return unhexlify(cont_str)
def init_hash_uuid_lut(session, hashes):
    """
    From the list [x, y, z] of hashes return a dictionary
    which tells if a chunk was `linked` or not:
        {x: LinkageEntity, y: LinkageEntity, z: None}
    """
    # Note: unhexlify is necessary since the database stores
    # binary representations of the hashes
    bin_hashes = [binascii.unhexlify(ahash.encode('utf-8')) for ahash in hashes]
    links = session.query(LinkageEntity).filter(
        LinkageEntity.linkage_hash.in_(bin_hashes)).all()

    links_cache = {link.friendly_hash(): link for link in links}
    lut = {}

    # Extra loop so we can provide an entry for hashes not found in the db
    for ahash in hashes:
        lut[ahash] = links_cache.get(ahash, None)

    return lut
def read_uncompressed_patch(pcpatch_wkb, schema):
    '''
    Patch binary structure uncompressed:

    byte:         endianness (1 = NDR, 0 = XDR)
    uint32:       pcid (key to POINTCLOUD_SCHEMAS)
    uint32:       0 = no compression
    uint32:       npoints
    pointdata[]:  interpret relative to pcid
    '''
    patchbin = unhexlify(pcpatch_wkb)
    npoints = unpack("I", patchbin[9:13])[0]
    dt = schema_dtype(schema)
    patch = np.fromstring(patchbin[13:], dtype=dt)
    # debug
    # print(patch[:10])
    return patch, npoints
def decompress(points, schema):
    """
    Decode patch encoded with lazperf.
    'points' is a pcpatch in wkb
    """

    # retrieve number of points in wkb pgpointcloud patch
    npoints = patch_numpoints(points)
    hexbuffer = unhexlify(points[34:])
    hexbuffer += hexa_signed_int32(npoints)

    # uncompress
    s = json.dumps(schema).replace("\\", "")
    dtype = buildNumpyDescription(json.loads(s))

    lazdata = bytes(hexbuffer)

    arr = np.fromstring(lazdata, dtype=np.uint8)
    d = Decompressor(arr, s)
    output = np.zeros(npoints * dtype.itemsize, dtype=np.uint8)
    decompressed = d.decompress(output)

    return decompressed
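Both patch readers above follow the same pattern: unhexlify the hex-encoded WKB text, then interpret the raw bytes with struct or numpy. A stripped-down sketch of that pattern with a hypothetical field layout (not the actual pgpointcloud header):

import binascii
import struct

# Hypothetical little-endian header: one byte flag followed by a uint32 count.
wkb_hex = "01" + "05000000"
raw = binascii.unhexlify(wkb_hex)
flag, count = struct.unpack("<BI", raw)
assert (flag, count) == (1, 5)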
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
    text = tok.get_string()
    tok.get_eol()
    if len(text) != cls.text_len:
        raise dns.exception.SyntaxError(
            'Input text must have %s characters' % cls.text_len)
    expected_dash_idxs = range(2, cls.byte_len * 3 - 1, 3)
    for i in expected_dash_idxs:
        if text[i] != '-':
            raise dns.exception.SyntaxError('Dash expected at position %s'
                                            % i)
    text = text.replace('-', '')
    try:
        data = binascii.unhexlify(text.encode())
    except (ValueError, TypeError) as ex:
        raise dns.exception.SyntaxError('Hex decoding error: %s' % str(ex))
    return cls(rdclass, rdtype, data)
def to_wire(self, file):
    if self.family == 1:
        address = dns.inet.inet_pton(dns.inet.AF_INET, self.address)
    elif self.family == 2:
        address = dns.inet.inet_pton(dns.inet.AF_INET6, self.address)
    else:
        address = binascii.unhexlify(self.address)
    #
    # Truncate least significant zero bytes.
    #
    last = 0
    for i in xrange(len(address) - 1, -1, -1):
        if address[i] != chr(0):
            last = i + 1
            break
    address = address[0: last]
    l = len(address)
    assert l < 128
    if self.negation:
        l |= 0x80
    header = struct.pack('!HBB', self.family, self.prefix, l)
    file.write(header)
    file.write(address)
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
    key_tag = tok.get_uint16()
    algorithm = tok.get_uint8()
    digest_type = tok.get_uint8()
    chunks = []
    while 1:
        t = tok.get().unescape()
        if t.is_eol_or_eof():
            break
        if not t.is_identifier():
            raise dns.exception.SyntaxError
        chunks.append(t.value.encode())
    digest = b''.join(chunks)
    digest = binascii.unhexlify(digest)
    return cls(rdclass, rdtype, key_tag, algorithm, digest_type,
               digest)
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
    token = tok.get()
    if not token.is_identifier() or token.value != '\#':
        raise dns.exception.SyntaxError(
            r'generic rdata does not start with \#')
    length = tok.get_int()
    chunks = []
    while 1:
        token = tok.get()
        if token.is_eol_or_eof():
            break
        chunks.append(token.value.encode())
    hex = b''.join(chunks)
    data = binascii.unhexlify(hex)
    if len(data) != length:
        raise dns.exception.SyntaxError(
            'generic rdata hex data has wrong length')
    return cls(rdclass, rdtype, data)
def PrivateKeyDecryption(self, encryptedblob, iv, dbkey):
    magicCmsIV = unhexlify('4adda22c79e82105')
    plain = kcdecrypt(dbkey, magicCmsIV, encryptedblob)

    if plain.__len__() == 0:
        return ''

    # now we handle the unwrapping. we need to take the first 32 bytes,
    # and reverse them.
    revplain = ''
    for i in range(len(plain)):
        revplain += plain[len(plain) - 1 - i]

    # now the real key gets found. */
    plain = kcdecrypt(dbkey, iv, revplain)
    #hexdump(plain)

    Keyname = plain[:12]    # Copied Buffer when user click on right and copy a key on Keychain Access
    keyblob = plain[12:]

    return Keyname, keyblob

## Documents : http://www.opensource.apple.com/source/securityd/securityd-55137.1/doc/BLOBFORMAT
def _parse_mssql(hash, csize, bsize, handler):
    """common parser for mssql 2000/2005; returns 4 byte salt + checksum"""
    if isinstance(hash, unicode):
        if len(hash) == csize and hash.startswith(UIDENT):
            try:
                return unhexlify(hash[6:].encode("utf-8"))
            except TypeError:  # throw when bad char found
                pass
    elif isinstance(hash, bytes):
        # assumes ascii-compat encoding
        assert isinstance(hash, bytes)
        if len(hash) == csize and hash.startswith(BIDENT):
            try:
                return unhexlify(hash[6:])
            except TypeError:  # throw when bad char found
                pass
        ##elif len(hash) == bsize and hash.startswith(BIDENT2):  # raw bytes
        ##    return hash[2:]
    else:
        raise uh.exc.ExpectedStringError(hash, "hash")
    raise uh.exc.InvalidHashError(handler)
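The except TypeError above reflects Python 2 behaviour; on Python 3 unhexlify signals bad input with binascii.Error, a ValueError subclass. A portable guard might look like this (a sketch with a hypothetical helper name, not the library's code):

import binascii

def try_unhexlify(candidate):
    """Return decoded bytes, or None if candidate is not valid hex."""
    try:
        return binascii.unhexlify(candidate)
    except (binascii.Error, TypeError, ValueError):
        # Python 3 raises binascii.Error (a ValueError subclass);
        # Python 2 code paths raised TypeError.
        return None

assert try_unhexlify(b"deadbeef") == b"\xde\xad\xbe\xef"
assert try_unhexlify(b"not hex!") is None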
def client_encode(self, buf):
    if self.raw_trans_sent:
        return buf
    self.send_buffer += buf
    if not self.has_sent_header:
        self.has_sent_header = True
        data = b"\x03\x03" + os.urandom(32) + binascii.unhexlify(b"000016c02bc02fc00ac009c013c01400330039002f0035000a0100006fff01000100000a00080006001700180019000b0002010000230000337400000010002900270568322d31360568322d31350568322d313402683208737064792f332e3108687474702f312e31000500050100000000000d001600140401050106010201040305030603020304020202")
        data = b"\x01\x00" + struct.pack('>H', len(data)) + data
        data = b"\x16\x03\x01" + struct.pack('>H', len(data)) + data
        return data
    if self.has_recv_header:
        ret = self.send_buffer
        self.send_buffer = b''
        self.raw_trans_sent = True
        return ret
    return b''
def readPacked8(self, n, data):
    size = self.readInt8(data)
    remove = 0
    if (size & 0x80) != 0 and n == 251:
        remove = 1
    size = size & 0x7F
    text = bytearray(self.readArray(size, data))
    hexData = binascii.hexlify(str(text) if sys.version_info < (2, 7) else text).upper()
    dataSize = len(hexData)
    out = []
    if remove == 0:
        for i in range(0, dataSize):
            char = chr(hexData[i]) if type(hexData[i]) is int else hexData[i]  # python2/3 compat
            val = ord(binascii.unhexlify("0%s" % char))
            if i == (dataSize - 1) and val > 11 and n != 251:
                continue
            out.append(self.unpackByte(n, val))
    else:
        out = map(ord, list(hexData[0: -remove])) if sys.version_info < (3, 0) else list(hexData[0: -remove])
    return out
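The inner loop above converts one hex digit at a time by padding it to a full byte before calling unhexlify. In isolation, that trick looks like this (standalone illustration):

import binascii

for digit in "0A5F":
    value = ord(binascii.unhexlify("0%s" % digit))  # pad the nibble to one byte
    print(digit, value)                             # 0 -> 0, A -> 10, 5 -> 5, F -> 15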