The following 50 code examples, extracted from open source Python projects, illustrate how to use msgpack.Unpacker().

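All of the examples share the same streaming pattern: raw bytes are fed into an Unpacker, which buffers them and yields each message once it has arrived in full. Below is a minimal hand-written sketch of that pattern (not taken from any of the projects that follow). Note that the encoding='utf-8' argument seen in several older examples was removed in msgpack-python 1.0; raw=False gives the equivalent behaviour there.

import msgpack

# Minimal sketch of the feed/iterate pattern (assumes msgpack-python >= 0.6).
unpacker = msgpack.Unpacker(raw=False)  # raw=False decodes msgpack strings to str

# Simulate data arriving in arbitrary chunks, e.g. from a socket or pipe.
payload = msgpack.packb({'cmd': 'status'}) + msgpack.packb([1, 2, 3])
for i in range(0, len(payload), 4):
    unpacker.feed(payload[i:i + 4])  # feed whatever bytes have arrived so far
    for msg in unpacker:             # yields only fully buffered messages
        print(msg)                   # {'cmd': 'status'}, then [1, 2, 3]
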
def msgpack_appendable_pack(o, path):
    open(path, 'a+').close()  # touch
    with open(path, mode='r+b') as f:
        packer = msgpack.Packer()
        unpacker = msgpack.Unpacker(f)

        if type(o) == list:
            try:
                previous_len = unpacker.read_array_header()
            except msgpack.OutOfData:
                previous_len = 0

            # calculate and replace header
            header = packer.pack_array_header(previous_len + len(o))
            f.seek(0)
            f.write(header)
            f.write(bytes(1) * (MAX_MSGPACK_ARRAY_HEADER_LEN - len(header)))

            # append new elements
            f.seek(0, 2)
            for element in o:
                f.write(packer.pack(element))
        else:
            f.write(packer.pack(o))

def wait_for_at_least_one_message(self):
    """
    Reads until we receive at least one message we can unpack.
    Return all found messages.
    """
    unpacker = msgpack.Unpacker(encoding='utf-8')

    while True:
        chunk = ''
        try:
            chunk = self.ssh_stream_stdout.read(1)
        except Exception as error:
            self.connection_error(error)
            raise

        if chunk == '':
            # happens only when connection broke. If nothing is to be received, it hangs instead.
            self.connection_error('Connection broken')
            return False

        unpacker.feed(chunk)

        messages = [m for m in unpacker]
        if messages:
            return messages

def test_unpacker_ext_hook():
    class MyUnpacker(Unpacker):
        def __init__(self):
            super(MyUnpacker, self).__init__(ext_hook=self._hook, encoding='utf-8')

        def _hook(self, code, data):
            if code == 1:
                return int(data)
            else:
                return ExtType(code, data)

    unpacker = MyUnpacker()
    unpacker.feed(packb({'a': 1}, encoding='utf-8'))
    assert unpacker.unpack() == {'a': 1}
    unpacker.feed(packb({'a': ExtType(1, b'123')}, encoding='utf-8'))
    assert unpacker.unpack() == {'a': 123}
    unpacker.feed(packb({'a': ExtType(2, b'321')}, encoding='utf-8'))
    assert unpacker.unpack() == {'a': ExtType(2, b'321')}

def test_foobar():
    unpacker = Unpacker(read_size=3, use_list=1)
    unpacker.feed(b'foobar')
    assert unpacker.unpack() == ord(b'f')
    assert unpacker.unpack() == ord(b'o')
    assert unpacker.unpack() == ord(b'o')
    assert unpacker.unpack() == ord(b'b')
    assert unpacker.unpack() == ord(b'a')
    assert unpacker.unpack() == ord(b'r')
    with raises(OutOfData):
        unpacker.unpack()

    unpacker.feed(b'foo')
    unpacker.feed(b'bar')

    k = 0
    for o, e in zip(unpacker, 'foobarbaz'):
        assert o == ord(e)
        k += 1
    assert k == len(b'foobar')

def test_unpack_tell():
    stream = io.BytesIO()
    messages = [2 ** i - 1 for i in range(65)]
    messages += [-(2 ** i) for i in range(1, 64)]
    messages += [b'hello', b'hello' * 1000, list(range(20)),
                 {i: bytes(i) * i for i in range(10)},
                 {i: bytes(i) * i for i in range(32)}]
    offsets = []
    for m in messages:
        pack(m, stream)
        offsets.append(stream.tell())
    stream.seek(0)
    unpacker = Unpacker(stream)
    for m, o in zip(messages, offsets):
        m2 = next(unpacker)
        assert m == m2
        assert o == unpacker.tell()

def socket_incoming_connection(socket, address):
    logger.debug('connected %s', address)
    sockets[address] = socket
    unpacker = Unpacker(encoding='utf-8')
    while True:
        data = socket.recv(4096)
        if not data:
            logger.debug('closed connection %s', address)
            break
        unpacker.feed(data)
        for msg in unpacker:
            receive.put(InMsg(msg, address))
            logger.debug('got socket msg: %s', msg)
    sockets.pop(address)

def _outgoing_connect_cb(self, peer_id, tcp_handle, error):
    """Called on attempt to make outgoing connection to a peer."""
    if error is not None:
        self._logger.error(
            "unable to establish connection to peer %d", peer_id,
        )
        del self._outgoing[peer_id]
        return

    self._outgoing[peer_id] = Outgoing(
        tcp_handle,
        msgpack.Packer(),
        msgpack.Unpacker(),
        self._conf[peer_id],
        {},
    )
    tcp_handle.start_read(partial(self._outgoing_read_cb, peer_id))
    self._logger.info("connect to peer %d", peer_id)

def __init__(self, conf, timeout):
    self._conf = conf
    self._timeout = timeout
    self._unpacker = msgpack.Unpacker()
    self._packer = msgpack.Packer()

    # connection variables
    self._peers = {}  # peer_id -> Peer
    self._sock_to_peer = {}  # socket.connection -> Peer
    self._peers_lock = threading.Lock()  # for _peers and _sock_to_peers

    # request / response variables
    self._req_count = 0
    self._req_count_lock = threading.Lock()

    # For reuse of ValueEvent objects by a thread.
    self._threadlocal = threading.local()

    self._patch_client_for_gevent()

    self._bg_thread = threading.Thread(
        target=self._process_requests_in_background
    )
    self._bg_thread.setDaemon(True)
    self._bg_thread.start()

def req(self, data, timeout):
    """Sends data to the other side and waits for a response.

    If no response within timeout period (or connection failure)
    then raises an error.
    """
    # fixme: handle timeout
    packer = msgpack.Packer()
    unpacker = msgpack.Unpacker()
    with self._sock_lock:
        # fixme: handle some errors here
        self._sock.sendall(packer.pack(data))
        while True:
            amt = self._sock.recv_into(self._req_buffer, 1000000)
            # recv_into returns the byte count; 0 means the socket closed
            if not amt:
                raise ValueError("socket closed fixme: raise real error")
            unpacker.feed(self._req_mv[:amt])
            for m in unpacker:
                # We only expect a single message in response
                # because of the synchronous pattern from sendall.
                return m

def __init__(self):
    self.decoder = Decoder()
    self.unpacker = msgpack.Unpacker(object_pairs_hook=OrderedDict)

def msgpack_appendable_unpack(path):
    # if not list?
    # return msgpack.unpackb(f.read())
    with open(path, 'rb') as f:
        packer = msgpack.Packer()
        unpacker = msgpack.Unpacker(f, encoding='utf-8')
        length = unpacker.read_array_header()

        header_length = len(packer.pack_array_header(length))
        unpacker.read_bytes(MAX_MSGPACK_ARRAY_HEADER_LEN - header_length)
        f.seek(MAX_MSGPACK_ARRAY_HEADER_LEN)

        return [unpacker.unpack() for _ in range(length)]

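This function is the read-side counterpart of msgpack_appendable_pack in the second example: the pack side reserves MAX_MSGPACK_ARRAY_HEADER_LEN bytes so the array header can be rewritten in place as the array grows, and the unpack side skips the padding. A minimal usage sketch, assuming the constant is defined as 5 (an array32 header, the largest msgpack array header, is 5 bytes) and an msgpack-python release that still accepts encoding='utf-8' (pre-1.0):

import msgpack

MAX_MSGPACK_ARRAY_HEADER_LEN = 5  # assumed value: an array32 header is 5 bytes

msgpack_appendable_pack([1, 2, 3], '/tmp/data.mp')
msgpack_appendable_pack([4, 5], '/tmp/data.mp')   # appends, rewriting the header in place
print(msgpack_appendable_unpack('/tmp/data.mp'))  # [1, 2, 3, 4, 5]
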
def load_pack(self, fin, encoding='utf-8'):
    self._init()
    unpacker = msgpack.Unpacker()
    BUFSIZE = 1024 * 1024
    while True:
        buf = fin.read(BUFSIZE)
        if not buf:
            break
        unpacker.feed(buf)
        for id, word, count in unpacker:
            word = word.decode(encoding)
            self.__set(id=id, word=word, count=count)
    return self

def __init_load(self):
    self.lines = []
    self.line_length_counts = collections.Counter()
    self.line_length_cumcounts = collections.Counter()
    unpacker = msgpack.Unpacker()
    self.fin.seek(0)
    BUFSIZE = 1024 * 1024
    while True:
        buf = self.fin.read(BUFSIZE)
        if not buf:
            break
        unpacker.feed(buf)
        for words in unpacker:
            if self.on_memory:
                self.lines.append(words)
            self.line_length_counts[len(words)] += 1
    cum = 0
    for length, count in sorted(self.line_length_counts.items()):
        cum += count
        self.line_length_cumcounts[length] = cum
    self.fin.seek(0)

def __next(self):
    self.batch = []
    unpacker = msgpack.Unpacker()
    BUFSIZE = 1024 * 1024
    num_lines = 0
    while True:
        buf = self.fin.read(BUFSIZE)
        if not buf:
            break
        unpacker.feed(buf)
        for words in unpacker:
            if len(words) > self.max_line_length:
                continue
            num_lines += 1
            if self.max_num_lines is not None and num_lines == self.max_num_lines:
                break
            self.batch.append(words)
            self.num_lines += 1
            if len(self.batch) == self.batch_size:
                self.num_batches += 1
                yield self.feed_callback(self.batch)
                self.batch = []
        if self.max_num_lines is not None and num_lines == self.max_num_lines:
            break
    self.num_epochs += 1
    self.fin.seek(0)
    return  # PEP 479: raising StopIteration inside a generator is an error on Python 3.7+

def messageLoop(self):
    if not self.sock:
        self.log("Socket error: No socket found")
        return False
    self.protocol = "v2"
    self.updateName()
    self.connected = True

    self.unpacker = msgpack.Unpacker()
    try:
        while True:
            buff = self.sock.recv(16 * 1024)
            if not buff:
                break  # Connection closed

            # Statistics
            self.last_recv_time = time.time()
            self.incomplete_buff_recv += 1
            self.bytes_recv += len(buff)
            self.server.bytes_recv += len(buff)

            if not self.unpacker:
                self.unpacker = msgpack.Unpacker()
            self.unpacker.feed(buff)
            buff = None
            for message in self.unpacker:
                self.incomplete_buff_recv = 0
                if "stream_bytes" in message:
                    self.handleStream(message)
                else:
                    self.handleMessage(message)
            message = None
    except Exception as err:
        if not self.closed:
            self.log("Socket error: %s" % Debug.formatException(err))
    self.close()  # MessageLoop ended, close connection

def _unpack_generic(self, expected_version=1):
    """
    Unpacks the 2-element model data structure and returns file-like
    objects of raw service model and weight manager model.
    """
    (rm, wm) = (BytesIO(), BytesIO())
    unp = msgpack.Unpacker(BytesIO(self._m._user_raw))
    assert unp.read_array_header() == 2      # <user_container>
    assert unp.unpack() == expected_version  # +- <version>
    assert unp.read_array_header() == 2      # +- <user_data>
    unp.unpack(rm.write)                     # +- (service model)
    unp.unpack(wm.write)                     # +- wm_.get_model()->pack(pk)
    rm.seek(0)
    wm.seek(0)
    return rm, wm

def _extract_nn(self, rm):
    nn = BytesIO()
    unp = msgpack.Unpacker(rm)
    assert unp.read_array_header() == 2  # classifier_->pack(pk)
    unp.unpack(nn.write)                 # +- nearest_neighbor_engine_->pack(pk)
    unp.skip()                           # +- labels_.pack(pk)
    return nn

def _extract_nn(self, rm):
    nn = BytesIO()
    unp = msgpack.Unpacker(rm)
    assert unp.read_array_header() == 2  # recommender_->pack(pk)
    unp.skip()                           # +- orig_.pack(packer)
    unp.unpack(nn.write)                 # +- nearest_neighbor_engine_->pack(pk)
    return nn

def _extract_nn(self, rm):
    nn = BytesIO()
    unp = msgpack.Unpacker(rm)
    assert unp.read_array_header() == 2  # anomaly_->pack(pk)
    unp.unpack(nn.write)                 # +- nearest_neighbor_engine_->pack(pk)
    unp.skip()                           # +- mixable_scores_->get_model()->pack(packer)
    return nn

def _extract_recommender(self, rm):
    nn = BytesIO()
    unp = msgpack.Unpacker(rm)
    assert unp.read_array_header() == 2  # anomaly_->pack(pk)
    unp.skip()                           # +- mixable_storage_->get_model()->pack(packer)
    unp.unpack(nn.write)                 # +- nn_engine_->pack(packer)
    return nn

def decode_request(self, rpc_request_data):
    '''Deserialize rpc request data; returns (if_success, RPCRequest or None).'''
    import protocol, msgpack
    if_success = False
    try:
        unpacker = msgpack.Unpacker(encoding='utf-8')
        unpacker.feed(rpc_request_data)
        xid, msg_type, uid, service_name, command_name, args = unpacker.unpack()
    except:
        # deserialization failed
        log_message = 'rpc request deserialize failed'
        self._logger.write_log(log_message, 'error')
        return if_success, None
    if msg_type != protocol.RPCRequest.MSG_TYPE:
        # wrong message type
        log_message = 'msg type is not rpc request'
        self._logger.write_log(log_message, 'error')
        return if_success, None
    if_success = True
    rpc_request = protocol.RPCRequest(xid, uid, service_name, command_name, args)
    return if_success, rpc_request

def decode_response(self, rpc_response_data):
    '''Deserialize rpc response data; returns (if_success, RPCResponse or None).'''
    import protocol, msgpack
    if_success = False
    try:
        unpacker = msgpack.Unpacker(encoding='utf-8')
        unpacker.feed(rpc_response_data)
        xid, uid, msg_type, state, result = unpacker.unpack()
    except:
        # deserialization failed
        log_message = 'rpc response deserialize failed'
        self._logger.write_log(log_message, 'error')
        return if_success, None
    if msg_type != protocol.RPCResponse.MSG_TYPE:
        # wrong message type
        log_message = 'msg type is not rpc response'
        self._logger.write_log(log_message, 'error')
        return if_success, None
    if_success = True
    rpc_response = protocol.RPCResponse(xid, uid, state, result)
    return if_success, rpc_response

def _cache_sync_archive(self, archive_id):
    log.debug('Started cache sync')
    add_chunk = self._cache.chunks.add
    cdata = self._cache.repository.get(archive_id)
    _, data = self._cache.key.decrypt(archive_id, cdata)
    add_chunk(archive_id, 1, len(data), len(cdata))
    try:
        archive = ArchiveItem(internal_dict=msgpack.unpackb(data))
    except (TypeError, ValueError, AttributeError) as error:
        log.error('Corrupted/unknown archive metadata: %s', error)
        return False
    if archive.version != 1:
        log.error('Unknown archive metadata version %r', archive.version)
        return False
    unpacker = msgpack.Unpacker()
    for item_id, chunk in zip(archive.items, self._cache.repository.get_many(archive.items)):
        _, data = self._cache.key.decrypt(item_id, chunk)
        add_chunk(item_id, 1, len(data), len(chunk))
        unpacker.feed(data)
        for item in unpacker:
            if not isinstance(item, dict):
                log.error('Error: Did not get expected metadata dict - archive corrupted!')
                return False
            if b'chunks' in item:
                for chunk_id, size, csize in item[b'chunks']:
                    add_chunk(chunk_id, 1, size, csize)
    log.debug('Completed cache sync')
    return True

def __init__(self):
    super(MessageEncoder, self).__init__()
    # note: on-wire msgpack has no notion of encoding.
    # the msgpack-python library implicitly converts unicode to
    # utf-8 encoded bytes by default.  we don't want to rely on
    # the behaviour though because it seems to be going to change.
    # cf. https://gist.github.com/methane/5022403
    self._packer = msgpack.Packer(encoding=None)
    self._unpacker = msgpack.Unpacker(encoding=None)
    self._next_msgid = 0

def __init__(self, socket, outgoing_msg_sink_iter):
    super(RpcSession, self).__init__("RpcSession(%s)" % socket)
    import msgpack
    self._packer = msgpack.Packer()
    self._unpacker = msgpack.Unpacker()
    self._next_msgid = 0
    self._socket = socket
    self._outgoing_msg_sink_iter = outgoing_msg_sink_iter

def testPackUnicode():
    test_data = ["", "abcd", ["defgh"], "Русский текст"]
    for td in test_data:
        re = unpackb(packb(td, encoding='utf-8'), use_list=1, encoding='utf-8')
        assert re == td
        packer = Packer(encoding='utf-8')
        data = packer.pack(td)
        re = Unpacker(BytesIO(data), encoding=str('utf-8'), use_list=1).unpack()
        assert re == td

def testArraySize(sizes=[0, 5, 50, 1000]):
    bio = BytesIO()
    packer = Packer()
    for size in sizes:
        bio.write(packer.pack_array_header(size))
        for i in range(size):
            bio.write(packer.pack(i))

    bio.seek(0)
    unpacker = Unpacker(bio, use_list=1)
    for size in sizes:
        assert unpacker.unpack() == list(range(size))

def test_manualreset(sizes=[0, 5, 50, 1000]):
    packer = Packer(autoreset=False)
    for size in sizes:
        packer.pack_array_header(size)
        for i in range(size):
            packer.pack(i)

    bio = BytesIO(packer.bytes())
    unpacker = Unpacker(bio, use_list=1)
    for size in sizes:
        assert unpacker.unpack() == list(range(size))

    packer.reset()
    assert packer.bytes() == b''

def testMapSize(sizes=[0, 5, 50, 1000]):
    bio = BytesIO()
    packer = Packer()
    for size in sizes:
        bio.write(packer.pack_map_header(size))
        for i in range(size):
            bio.write(packer.pack(i))      # key
            bio.write(packer.pack(i * 2))  # value

    bio.seek(0)
    unpacker = Unpacker(bio)
    for size in sizes:
        assert unpacker.unpack() == dict((i, i * 2) for i in range(size))

def test_unpack_array_header_from_file():
    f = BytesIO(packb([1, 2, 3, 4]))
    unpacker = Unpacker(f)
    assert unpacker.read_array_header() == 4
    assert unpacker.unpack() == 1
    assert unpacker.unpack() == 2
    assert unpacker.unpack() == 3
    assert unpacker.unpack() == 4
    with raises(OutOfData):
        unpacker.unpack()

def test_partialdata():
    unpacker = Unpacker()
    unpacker.feed(b'\xa5')
    with raises(StopIteration):
        next(iter(unpacker))
    unpacker.feed(b'h')
    with raises(StopIteration):
        next(iter(unpacker))
    unpacker.feed(b'a')
    with raises(StopIteration):
        next(iter(unpacker))
    unpacker.feed(b'l')
    with raises(StopIteration):
        next(iter(unpacker))
    unpacker.feed(b'l')
    with raises(StopIteration):
        next(iter(unpacker))
    unpacker.feed(b'o')
    assert next(iter(unpacker)) == b'hallo'

def test_foobar_skip():
    unpacker = Unpacker(read_size=3, use_list=1)
    unpacker.feed(b'foobar')
    assert unpacker.unpack() == ord(b'f')
    unpacker.skip()
    assert unpacker.unpack() == ord(b'o')
    unpacker.skip()
    assert unpacker.unpack() == ord(b'a')
    unpacker.skip()
    with raises(OutOfData):
        unpacker.unpack()

def test_maxbuffersize():
    with raises(ValueError):
        Unpacker(read_size=5, max_buffer_size=3)
    unpacker = Unpacker(read_size=3, max_buffer_size=3, use_list=1)
    unpacker.feed(b'fo')
    with raises(BufferFull):
        unpacker.feed(b'ob')
    unpacker.feed(b'o')
    assert ord('f') == next(unpacker)
    unpacker.feed(b'b')
    assert ord('o') == next(unpacker)
    assert ord('o') == next(unpacker)
    assert ord('b') == next(unpacker)

def test_issue124():
    unpacker = Unpacker()
    unpacker.feed(b'\xa1?\xa1!')
    assert tuple(unpacker) == (b'?', b'!')
    assert tuple(unpacker) == ()
    unpacker.feed(b"\xa1?\xa1")
    assert tuple(unpacker) == (b'?',)
    assert tuple(unpacker) == ()
    unpacker.feed(b"!")
    assert tuple(unpacker) == (b'!',)
    assert tuple(unpacker) == ()

def test_exceeding_unpacker_read_size():
    dumpf = io.BytesIO()
    packer = msgpack.Packer()

    NUMBER_OF_STRINGS = 6
    read_size = 16
    # 5 ok for read_size=16, while 6 glibc detected *** python: double free or corruption (fasttop):
    # 20 ok for read_size=256, while 25 segfaults / glibc detected *** python: double free or corruption (!prev)
    # 40 ok for read_size=1024, while 50 introduces errors
    # 7000 ok for read_size=1024*1024, while 8000 leads to glibc detected *** python: double free or corruption (!prev):

    for idx in range(NUMBER_OF_STRINGS):
        data = gen_binary_data(idx)
        dumpf.write(packer.pack(data))

    f = io.BytesIO(dumpf.getvalue())
    dumpf.close()

    unpacker = msgpack.Unpacker(f, read_size=read_size, use_list=1)

    read_count = 0
    for idx, o in enumerate(unpacker):
        assert type(o) == bytes
        assert o == gen_binary_data(idx)
        read_count += 1

    assert read_count == NUMBER_OF_STRINGS

def test_read_array_header():
    unpacker = Unpacker()
    unpacker.feed(packb(['a', 'b', 'c']))
    assert unpacker.read_array_header() == 3
    assert unpacker.unpack() == b'a'
    assert unpacker.unpack() == b'b'
    assert unpacker.unpack() == b'c'
    try:
        unpacker.unpack()
        assert 0, 'should raise exception'
    except OutOfData:
        assert 1, 'okay'

def test_read_map_header():
    unpacker = Unpacker()
    unpacker.feed(packb({'a': 'A'}))
    assert unpacker.read_map_header() == 1
    assert unpacker.unpack() == b'a'
    assert unpacker.unpack() == b'A'
    try:
        unpacker.unpack()
        assert 0, 'should raise exception'
    except OutOfData:
        assert 1, 'okay'

def test_incorrect_type_map():
    unpacker = Unpacker()
    unpacker.feed(packb(1))
    try:
        unpacker.read_map_header()
        assert 0, 'should raise exception'
    except UnexpectedTypeException:
        assert 1, 'okay'

def test_correct_type_nested_array():
    unpacker = Unpacker()
    unpacker.feed(packb({'a': ['b', 'c', 'd']}))
    try:
        unpacker.read_array_header()
        assert 0, 'should raise exception'
    except UnexpectedTypeException:
        assert 1, 'okay'

def test_incorrect_type_nested_map():
    unpacker = Unpacker()
    unpacker.feed(packb([{'a': 'b'}]))
    try:
        unpacker.read_map_header()
        assert 0, 'should raise exception'
    except UnexpectedTypeException:
        assert 1, 'okay'

def test_write_bytes():
    unpacker = Unpacker()
    unpacker.feed(b'abc')
    f = io.BytesIO()
    assert unpacker.unpack(f.write) == ord('a')
    assert f.getvalue() == b'a'
    f = io.BytesIO()
    assert unpacker.skip(f.write) is None
    assert f.getvalue() == b'b'
    f = io.BytesIO()
    assert unpacker.skip() is None
    assert f.getvalue() == b''

def test_write_bytes_multi_buffer():
    long_val = (5) * 100
    expected = packb(long_val)
    unpacker = Unpacker(io.BytesIO(expected), read_size=3, max_buffer_size=3)

    f = io.BytesIO()
    unpacked = unpacker.unpack(f.write)
    assert unpacked == long_val
    assert f.getvalue() == expected

def msgpack_unpack_msg_from_socket(socket):
    unpacker = msgpack.Unpacker()
    while True:
        buf = socket.recv(1024)
        if not buf:
            break
        unpacker.feed(buf)
        try:
            item = next(unpacker)
        except StopIteration:
            pass
        else:
            return item

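A quick way to exercise msgpack_unpack_msg_from_socket is a socketpair; the following is an illustrative sketch, not from the original project:

import socket
import msgpack

a, b = socket.socketpair()
a.sendall(msgpack.packb({'op': 'ping'}))
a.close()  # ensures recv() eventually returns b'' so the loop can terminate

print(msgpack_unpack_msg_from_socket(b))
# {'op': 'ping'} on msgpack >= 1.0 (raw=False by default); {b'op': b'ping'} on older releases
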
def __init__(self):
    self.heartBeatCnt = 0
    self.subHost = None
    self.subPort = None
    self.tcpConn = None
    self.unpacker = msgpack.Unpacker(encoding='utf-8')

def onMessage(self, Connection, Data):
    try:
        self.unpacker.feed(Data)
        for result in self.unpacker:
            Domoticz.Debug("Got: %s" % result)

            if 'exception' in result:
                return

            if result['cmd'] == 'status':
                UpdateDevice(self.statusUnit,
                             (1 if result['state_code'] in [5, 6, 11] else 0),  # ON is Cleaning, Back to home, Spot cleaning
                             self.states.get(result['state_code'], 'Undefined'))

                UpdateDevice(self.batteryUnit, result['battery'], str(result['battery']),
                             result['battery'], AlwaysUpdate=(self.heartBeatCnt % 100 == 0))

                if Parameters['Mode5'] == 'dimmer':
                    UpdateDevice(self.fanDimmerUnit, 2, str(result['fan_level']))  # nValue=2 shows percentage instead of ON/OFF state
                else:
                    level = {38: 10, 60: 20, 77: 30, 90: 40}.get(result['fan_level'], None)
                    if level:
                        UpdateDevice(self.fanSelectorUnit, 1, str(level))

            elif result['cmd'] == 'consumable_status':
                mainBrush = cPercent(result['main_brush'], 300)
                sideBrush = cPercent(result['side_brush'], 200)
                filter = cPercent(result['filter'], 150)
                sensors = cPercent(result['sensor'], 30)

                UpdateDevice(self.cMainBrushUnit, mainBrush, str(mainBrush), AlwaysUpdate=True)
                UpdateDevice(self.cSideBrushUnit, sideBrush, str(sideBrush), AlwaysUpdate=True)
                UpdateDevice(self.cFilterUnit, filter, str(filter), AlwaysUpdate=True)
                UpdateDevice(self.cSensorsUnit, sensors, str(sensors), AlwaysUpdate=True)

    except msgpack.UnpackException as e:
        Domoticz.Error('Unpacker exception [%s]' % str(e))

def __init__(self):
    self.chunks = []
    self.unpacker = msgpack.Unpacker()
    self.exception = None

def setUp(self):
    self.sck = StubWriterSocket()
    self.writer = MessageWriter(self.sck)
    self.unpacker = msgpack.Unpacker()

def __init__(self, sck):
    self._sck = sck
    self._unpacker = msgpack.Unpacker()

def new_messages(self, timeout=1):
    """
    Generator: generates 0 or more tuples containing message type and
    message body (as a dict).

    May generate 0 events in certain conditions even if there are
    events available.  (If the socket returns EAGAIN, for example.)

    :param timeout: Maximum time to block waiting on the socket before
           giving up.  No exception is raised upon timeout but 0 events
           are generated.
    :raises SocketClosed if the socket is closed.
    :raises socket.error if an unexpected socket error occurs.
    """
    if timeout is not None:
        read_ready, _, _ = select.select([self._sck], [], [], timeout)
        if not read_ready:
            return
    try:
        data = self._sck.recv(16384)
    except socket.error as e:
        if e.errno in (errno.EAGAIN, errno.EWOULDBLOCK, errno.EINTR):
            _log.debug("Retryable error on read.")
            return
        else:
            _log.error("Failed to read from socket: %r", e)
            raise
    if not data:
        # No data indicates an orderly shutdown of the socket,
        # which shouldn't happen.
        _log.error("Socket closed by other end.")
        raise SocketClosed()
    # Feed the data into the Unpacker, if it has enough data it will then
    # generate some messages.
    self._unpacker.feed(data)
    for msg in self._unpacker:
        _log.debug("Unpacked message: %s", msg)
        # coverage.py doesn't fully support yield statements.
        yield msg[MSG_KEY_TYPE], msg  # pragma: nocover

def _incoming_connection(self, server, error):
    """Called on a remote client's attempt to connect to this Node."""
    if error is not None:
        return

    client = pyuv.TCP(self._loop)
    server.accept(client)
    self._incoming[client] = Incoming(
        msgpack.Packer(),
        msgpack.Unpacker(),
    )
    client.start_read(self._incoming_read)