The following code examples, extracted from open-source Python projects, illustrate how to use dill.loads().
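Before the project examples, a minimal round-trip sketch may help: dill extends pickle to handle objects the standard library cannot serialize, such as lambdas and closures. The names below (square, payload) are illustrative only, not taken from any of the projects that follow.

import dill

# dill can serialize constructs the standard pickle module rejects,
# such as lambdas and locally defined functions.
square = lambda x: x * x

payload = dill.dumps(square)    # bytes
restored = dill.loads(payload)  # a callable equivalent to `square`

assert restored(4) == 16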
def promise_executor_job(job, promise):
    """
    Toil job that runs a promise with an executor.

    Executes the executor and rejects/resolves the promise.

    Returns the promise's success result and error, as a pair.
    """
    executor = dill.loads(promise.executor_dill)

    # Run the executor, and handle resolution/rejection, possibly scheduling
    # child jobs
    executor(lambda result: promise.handle_resolve(job, result),
             lambda err: promise.handle_reject(job, err))

    # Grab the cached result and return it
    return (promise.result, promise.err)
def _handle_loopback(self, message):
    """ Handle incoming messages in the loopback socket. """
    header, data = dill.loads(message)
    if header == 'EXECUTE_METHOD':
        method, args, kwargs = data
        try:
            response = getattr(self, method)(*args, **kwargs)
        except Exception as error:
            yield format_method_exception(error, method, args, kwargs)
            raise
        yield response or True
    else:
        error = 'Unrecognized loopback message: {} {}'.format(header, data)
        self.log_error(error)
        yield error
async def handler(reader, writer):
    queries = await reader.read(-1)
    try:
        queries = dill.loads(queries)
        shelf = queries.pop(0)
        result = QueryHandler(db, shelf, queries).run()
        result = dill.dumps(result)
    except:
        print("Unexpected error:", sys.exc_info()[1])
        # Send the exception back to the client before re-raising
        result = dill.dumps(sys.exc_info()[1])
        writer.write(result)
        await writer.drain()
        writer.close()
        raise
    writer.write(result)
    await writer.drain()
    writer.close()
def deepcopy(self):
    """ Return a deep copy of the batch.

    Constructs a new ``Batch`` instance and then recursively copies all
    the objects found in the original batch, except the ``pipeline``,
    which remains unchanged.

    Returns
    -------
    Batch
    """
    pipeline = self.pipeline
    self.pipeline = None
    dump_batch = dill.dumps(self)
    self.pipeline = pipeline

    restored_batch = dill.loads(dump_batch)
    restored_batch.pipeline = pipeline
    return restored_batch
def test_pickle(self):
    import sys
    if sys.version_info < (3, 4):
        import dill as pickle
    else:
        import pickle

    states = ['A', 'B', 'C', 'D']
    # Define with list of dictionaries
    transitions = [
        {'trigger': 'walk', 'source': 'A', 'dest': 'B'},
        {'trigger': 'run', 'source': 'B', 'dest': 'C'},
        {'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
    ]
    m = Machine(states=states, transitions=transitions, initial='A')
    m.walk()
    dump = pickle.dumps(m)
    self.assertIsNotNone(dump)
    m2 = pickle.loads(dump)
    self.assertEqual(m.state, m2.state)
    m2.run()
def p_b_ot():
    params = json.loads(cherrypy.request.headers['params'])
    body = cherrypy.request.body.read()
    event_stream = False
    try:
        body = json.loads(body)
        if 'js_func' in body and body['js_func']:
            js_func = True
        else:
            js_func = False
        if 'event_stream' in body and body['event_stream']:
            event_stream = True
    except json.JSONDecodeError:
        body = pickle.loads(body)
        js_func = False
    if 'type' in params:
        obj_type = params['type']
    else:
        obj_type = None
    return params, body, obj_type, js_func, event_stream
def restore_state(self, path):
    """Returns loaded state"""
    try:
        with open(path, 'rb') as f:
            if self.encrypt:
                state = pickle.loads(self.decrypt_data(pickle.load(f)))
            else:
                state = pickle.load(f)
        LOG.debug("Restoring state succeeded")
    except Exception as e:
        LOG.debug("Restoring state from %s failed with %s" % (path, e))
        state = StateMachine(self.bot, state_path=path)
        LOG.debug("Successfully initialized new state.")
    return state
def restore_state(self, path):
    """Returns loaded state"""
    tmp_file = io.BytesIO()
    self.bucket.download_fileobj(path, tmp_file)
    if self.encrypt:
        data = self.decrypt_data(tmp_file.getvalue())
    else:
        data = tmp_file.getvalue()
    # pickle.loads expects bytes, so the payload is passed through unchanged
    state = pickle.loads(data)
    state.bot = self.bot
    return state
def deserialize(name_d, func_code_d, args_d, clos_d, type_obj):
    """A function to deserialize an object serialized with the serialize
    function.

    Args:
        name_d(unicode): the dumped name of the object
        func_code_d(unicode): the dumped byte code of the function
        args_d(unicode): the dumped information about the arguments
        clos_d(unicode): the dumped information about the function closure

    Returns:
        a deserialized object"""
    if type_obj == 'func':
        name = pickle.loads(name_d.encode('raw_unicode_escape'))
        code = dill.loads(func_code_d.encode('raw_unicode_escape'))
        args = pickle.loads(args_d.encode('raw_unicode_escape'))
        clos = dill.loads(clos_d.encode('raw_unicode_escape'))
        loaded_obj = types.FunctionType(code, globals(), name, args, clos)
    else:  # pragma: no cover
        loaded_obj = dill.loads(func_code_d.encode('raw_unicode_escape'))
    return loaded_obj


# Serialization utilities
def run_dill_encoded(what):
    fun, args = dill.loads(what)
    return fun(*args)
def download_var_cloud(data_name):
    if not isinstance(data_name, str):
        print("data_name must be a string")
        return
    user_hash = settings.API_KEY
    url = 'http://%s/api/save/getDownloadUrl' % settings.CATALEARN_URL
    r = requests.post(url, data={
        'type': 'variable',
        'user_hash': user_hash,
        'file_name': data_name
    })
    if r.status_code != 200:
        raise RuntimeError(r.text)
    presigned_url = r.content

    # Fetch the variable from the presigned URL and stream it into memory
    res = requests.get(presigned_url, stream=True)
    raw = io.BytesIO()
    download_progress(res, raw, data_name)
    result = dill.loads(raw.getvalue())
    return result
def promise_then_job(job, promise, prev_promise_returned):
    """
    Toil job that runs a promise created with a then handler instead of an
    executor.

    Takes the promise and the (resolve value, reject value) pair from the
    previous promise.

    Returns the promise's success result and error, as a pair.
    """
    then_handler = dill.loads(promise.then_dill)

    resolved, rejected = prev_promise_returned

    if rejected is None:
        # Actually run this child promise
        try:
            # Get the result from the then handler and resolve with it
            result = then_handler(resolved)
            promise.handle_resolve(job, result)
        except Exception as e:
            # Reject with an error if there is one
            Logger.error("".join(traceback.format_exception(*sys.exc_info())))
            promise.handle_reject(job, e)
    else:
        # Parent promise rejected so we should not run
        # Bubble up the error
        promise.handle_reject(job, rejected)

    return (promise.result, promise.err)
def _handle_loopback_safe(self, data):
    """ Handle incoming messages in the _loopback_safe socket. """
    method, args, kwargs = dill.loads(data)
    try:
        response = getattr(self, method)(*args, **kwargs)
    except Exception as error:
        yield format_method_exception(error, method, args, kwargs)
        raise
    yield response
def _execute_tasks(self):
    self._poll()
    while not self._queue.empty():
        task = dill.loads(self._queue.get())
        _try_callback(self, task)
def import_object(obj):
    import dill as pickle
    import base64
    # if obj is None:
    #     obj = sys.stdin.read().strip().encode('utf-8')
    if isinstance(obj, str):
        obj = obj.strip().encode('utf-8')
    return pickle.loads(gzip.zlib.decompress(base64.b64decode(obj)))
def load(self):
    """a private method that loads an estimator object from the filesystem"""
    if self.is_file_persisted:
        self.object_file.open()
        temp = dill.loads(self.object_file.read())
        self.set_object(temp)
        self.object_file.close()
def deserialize(data):
    return dill.loads(data)
def decode(self, x):
    return json.loads(x)
def _load_blosc(self, ix, src=None, components=None):
    """ Load data from a blosc packed file """
    file_name = self._get_file_name(ix, src, 'blosc')
    with open(file_name, 'rb') as f:
        data = dill.loads(blosc.decompress(f.read()))
        if self.components is None:
            components = (list(data.keys())[0],)
        else:
            components = tuple(components or self.components)
        item = tuple(data[i] for i in components)
    return item
def decompress(compressed_workload_state):
    return pickle.loads(zlib.decompress(compressed_workload_state))
def test_pickle(self):
    import sys
    if sys.version_info < (3, 4):
        import dill as pickle
    else:
        import pickle

    # go to non initial state B
    self.stuff.to_B()
    # pickle Stuff model
    dump = pickle.dumps(self.stuff)
    self.assertIsNotNone(dump)
    stuff2 = pickle.loads(dump)
    self.assertTrue(stuff2.machine.is_state("B"))
    # check if machines of stuff and stuff2 are truly separated
    stuff2.to_A()
    self.stuff.to_C()
    self.assertTrue(stuff2.machine.is_state("A"))
    thread = Thread(target=stuff2.forward)
    thread.start()
    # give thread some time to start
    time.sleep(0.01)
    # both objects should be in different states
    # and also not share locks
    begin = time.time()
    # stuff should not be locked and execute fast
    self.assertTrue(self.stuff.machine.is_state("C"))
    fast = time.time()
    # stuff2 should be locked and take about 1 second
    # to be executed
    self.assertTrue(stuff2.machine.is_state("B"))
    blocked = time.time()
    self.assertAlmostEqual(fast - begin, 0, delta=0.1)
    self.assertAlmostEqual(blocked - begin, 1, delta=0.1)


# Same as TestLockedTransition but with LockedHierarchicalMachine
def test_pickle(self):
    import sys
    if sys.version_info < (3, 4):
        import dill as pickle
    else:
        import pickle

    states = ['A', 'B',
              {'name': 'C', 'children': ['1', '2',
                                         {'name': '3', 'children': ['a', 'b', 'c']}]},
              'D', 'E', 'F']
    transitions = [
        {'trigger': 'walk', 'source': 'A', 'dest': 'B'},
        {'trigger': 'run', 'source': 'B', 'dest': 'C'},
        {'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
    ]
    m = self.stuff.machine_cls(states=states, transitions=transitions, initial='A')
    m.walk()
    dump = pickle.dumps(m)
    self.assertIsNotNone(dump)
    m2 = pickle.loads(dump)
    self.assertEqual(m.state, m2.state)
    m2.run()
    if State.separator in '_':
        m2.to_C_3_a()
        m2.to_C_3_b()
    else:
        m2.to_C.s3.a()
        m2.to_C.s3.b()
def MDLSTM_train(params):
    func, args = dill.loads(params)
    X_arr, model, dict_conv_param, grd_truth_seq, reg = args[0]
    return func(X_arr, model, dict_conv_param, grd_truth_seq, reg)
def MDLSTM_val(params):
    func, args = dill.loads(params)
    X_arr, model, dict_conv_param, rand_no, grd_truth_seq, reg = args[0]
    return func(X_arr, model, dict_conv_param, grd_truth_seq, reg), rand_no
def load(cls, filepath):
    tmpdir = tempfile.mkdtemp()
    error = None
    logger.debug('extracting archive to: {}'.format(tmpdir))
    try:
        with tarfile.open(filepath, 'r:*') as ar:
            ar.extractall(tmpdir)
        with open(os.path.join(tmpdir, 'metadata'), 'rb') as f:
            payload = f.read()
        meta = dill.loads(payload)
        instance = cls()
        for attr_name, attr_val in meta.items():
            setattr(instance, attr_name, attr_val)
        if os.path.exists(os.path.join(tmpdir, 'vectors')):
            logger.debug('loading word vectors')
            import h5py
            with h5py.File(os.path.join(tmpdir, 'vectors'), 'r') as h5:
                setattr(instance, '_W', h5['vectors'][:])
        else:
            logger.debug('no word vectors found in archive')
    except Exception as e:
        logger.error('encountered error: {}'.format(e))
        error = e
    finally:
        logger.debug('cleaning up {}'.format(tmpdir))
        shutil.rmtree(tmpdir)
    if error is not None:
        raise error
    return instance
def _get_function_and_execute(f_dill, *args):
    f = dill.loads(f_dill)
    return f(*args)
def update(self, g_id, dbid, head, conn):
    params, body, obj_type, js_func, event_stream = p_b_ot()
    if obj_type + 's' in acceptable_types:
        obj_id, uid, id_quote = id_or_uid(obj_type, params['obj_id'])
        update = body['update']
        msg = errors['Nonexistence'][obj_type](g_id, obj_id)
        qu = r.db(dbid).table(obj_type + 's')
        if not uid:
            try:
                dd = auto_reql(qu.get(obj_id).update(update), conn)
            except r.ReqlNonExistenceError:
                return json.dumps({'error': msg})
        else:
            dd = auto_reql(qu.get_all(obj_id, index='uid').update(update), conn)
        return json.dumps(dd)
    elif obj_type in acceptable_types:
        update = body['update']

        def literalize(d):
            for k, v in d.items():
                if isinstance(v, dict):
                    literalize(v)
                else:
                    m = literal_check.search(v)
                    if m:
                        d[k] = r.literal(json.loads(m.group('json_doc')))

        literalize(update)
        qu = r.db(dbid).table(obj_type)
        if 'get_all' in body:
            if 'index' in body:
                qu = qu.get_all(*body['get_all'], index=body['index'])
            else:
                qu = qu.get_all(*body['get_all'])
        if 'filter' in body:
            filt_func = body['filter']
            if js_func:
                filt_func = r.js(filt_func)
            qu = qu.filter(filt_func)
        d = auto_reql(qu.update(update), conn)
        return json.dumps(d)
def graph_filter(self, g_id, dbid, head, conn):
    body = pickle.loads(cherrypy.request.body.read())
    filters = body['filter']
    if 'nodes' in filters and filters['nodes'] is not None:
        nf_name = str(uuid4())
        nfn = node_property_map(g_id, nf_name, 'bool', filters['nodes'], conn)['property_map']
        nf = property_maps[g_id][nfn]
    else:
        nf = None
    if 'links' in filters and filters['links'] is not None:
        lf_name = str(uuid4())
        lfn = link_property_map(g_id, lf_name, 'bool', filters['links'], conn)['property_map']
        lf = property_maps[g_id][lfn]
    else:
        lf = None
    if 'directed' in filters and filters['directed'] is not None:
        directed = filters['directed']
    else:
        directed = None
    if 'reversed' in filters and filters['reversed'] is not None:
        rev = filters['reversed']
    else:
        rev = False
    if 'filter_id' in filters and filters['filter_id'] is not None:
        g2_id = filters['filter_id']
    else:
        g2_id = str(uuid4()).replace('-', '_')
    g2 = gt.GraphView(graphs[g_id], vfilt=nf, efilt=lf, directed=directed, reversed=rev)
    graphs[g2_id] = g2
    prep_pm(g2_id)
    for pm in property_maps[g_id]:
        property_maps[g2_id][pm] = property_maps[g_id][pm]
    for nda in ndarrays[g_id]:
        ndarrays[g2_id][nda] = ndarrays[g_id][nda]
    return json.dumps({'subgraph': g2_id})
def fields(self, g_id, dbid, head, conn):
    obj_type = json.loads(head['params'])['type']
    if obj_type in acceptable_types:
        fields = auto_reql(
            r.db(dbid).table(obj_type).map(lambda n: n.keys()).reduce(
                lambda x, y: r.expr(x + y).distinct()),
            conn)
        return json.dumps(fields)
def convert(name, converter, process, shell):
    return dill.loads(converter)(get_env(shell.user_ns)[name])
def getRepresentation(name, process):
    obj_class = getClass(name, process)
    converters = pythonwhat.State.State.root_state.converters
    if obj_class in converters:
        repres = convert(name, dill.dumps(converters[obj_class]), process)
        if errored(repres):
            return ReprFail("manual conversion failed")
        else:
            return repres
    else:
        # first try to pickle
        try:
            stream = getStreamPickle(name, process)
            if not errored(stream):
                return pickle.loads(stream)
        except:
            pass

        # if it failed, try to dill
        try:
            stream = getStreamDill(name, process)
            if not errored(stream):
                return dill.loads(stream)
            return ReprFail("dilling inside process failed for %s - "
                            "write manual converter" % obj_class)
        except PicklingError:
            return ReprFail("undilling of bytestream failed with PicklingError - "
                            "write manual converter")
        except Exception as e:
            return ReprFail("undilling of bytestream failed for class %s - "
                            "write manual converter. Error: %s - %s"
                            % (obj_class, type(e), e))
def loads(x):
    """deserialize python object(s)"""
    try:
        return dill.loads(x)
    except Exception as e:
        logger.exception(e)
        raise
def read(stream):
    """read data from a stream"""
    # hack for `stream.readuntil`
    buffer = b''
    while True:
        buffer += yield from stream.readexactly(1)
        if buffer.endswith(sentinel):
            break
    msg = buffer[:-len(sentinel)]
    msg = loads(msg)
    return msg
def setup_method(self):
    import dill
    random = str(uuid.uuid4()).replace('-', '')
    filename = '{}_{}'.format(self.__class__.__name__, random)
    self.queue = PersistentQueue(filename, loads=dill.loads, dumps=dill.dumps)
def setup_method(self):
    import msgpack
    random = str(uuid.uuid4()).replace('-', '')
    filename = '{}_{}'.format(self.__class__.__name__, random)
    self.queue = PersistentQueue(filename, loads=msgpack.unpackb, dumps=msgpack.packb)
def check(obj):
    if not CHECK_SERIALIZATION:
        return
    try:
        dill.loads(dill.dumps(obj))
    except Exception as e:
        logging.error(
            "Couldn't serialize: %s\n'%s'\nBad objects:\n%s" % (
                str(obj), str(e), dill.detect.badobjects(obj, depth=2)))
        raise
def loads(s):
    return dill.loads(s)
def run_dill_encode(payload):
    fun, args = dill.loads(payload)
    return fun(*args)
def __init__(self, map=map, mapper_pickles=False):
    super().__init__()
    self.map = map
    self.pickle, self.unpickle = ((identity, identity)
                                  if mapper_pickles
                                  else (pickle.dumps, pickle.loads))
def _run_pickled(pickled):
    function, item = dill.loads(pickled)
    return function(item)
def safe_load(s):
    try:
        o = dill.loads(s)
        return o
    except:
        return None


# return functions that execute as expected on example inputs
def loader(pkl, args):
    f = dill.loads(pkl)
    return f(*args)
def exceptions(self):
    bugs = self.meta.cache.id2bugs(self.__meta_id__)
    out = []
    for b in bugs:
        b["args"] = dill.loads(b["args"])
        b["exception"] = dill.loads(b["exception"])
        out.append(b)
    return out
def loads(clz, dump_str: str):
    """Deserialize an object from a base64-encoded dill dump.

    Parameters:
        dump_str (str): - the base64-encoded dump string

    Returns:
        clz: - an instance of the class reconstructed from the dump
    """
    return dill.loads(base64.b64decode(dump_str))
def _loads(data):
    """ Decompress and deserialize. """
    return dill.loads(zlib.decompress(data))
def promise_then_job_fn_job(job, promise, *args, **kwargs):
    """
    Toil job that runs a promise created with a then_job_fn handler.

    Takes the promise, and the arguments to forward along to the handler,
    the last of which is the (result, error) pair from the last promise,
    which gets processed to just a result.

    Returns the promise's success result and error, as a pair.
    """

    # The pickled handler in this case takes a bunch of arguments: the Toil
    # job, and the success result from the last promise, and then any other
    # arguments or kwargs that the user wanted to pass along.
    then_handler = dill.loads(promise.then_dill)

    # Pull out the results from the last promise
    resolved, rejected = args[-1]
    args = args[:-1]

    if rejected is None:
        # Actually run this child promise

        # Stick the resolved value on
        args = list(args) + [resolved]

        try:
            # Get the result from the then handler and resolve with it
            result = then_handler(job, *args, **kwargs)
            promise.handle_resolve(job, result)
        except Exception as e:
            # Reject with an error if there is one
            Logger.error("".join(traceback.format_exception(*sys.exc_info())))
            promise.handle_reject(job, e)
    else:
        # Parent promise rejected so we should not run
        # Bubble up the error
        promise.handle_reject(job, rejected)

    return (promise.result, promise.err)