The following 50 code examples, extracted from open-source Python projects, illustrate how to use collections.deque().
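As a primer before the examples, here is a minimal sketch of the core deque operations they rely on (only documented collections.deque behavior):

from collections import deque

d = deque([1, 2, 3])
d.append(4)          # add on the right: deque([1, 2, 3, 4])
d.appendleft(0)      # add on the left:  deque([0, 1, 2, 3, 4])
d.popleft()          # O(1) removal from the left, unlike list.pop(0)
d.pop()              # O(1) removal from the right

bounded = deque(maxlen=3)    # a bounded deque discards from the far end
for i in range(5):
    bounded.append(i)        # ends up as deque([2, 3, 4], maxlen=3)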
def __init__(self, *args, **kwargs):
    self._generator = Task.__get_generator(self, *args, **kwargs)
    self._name = '!' + self._generator.__name__
    self._id = id(self)
    self._state = None
    self._value = None
    self._exceptions = []
    self._callers = []
    self._timeout = None
    self._daemon = False
    self._complete = None
    self._msgs = collections.deque()
    self._monitors = set()
    self._swap_generator = None
    self._hot_swappable = False
    if not Task._pycos:
        Pycos.instance()
    if not Task._pycos:
        Task._pycos = Pycos.instance()
    self._scheduler = self.__class__._pycos
    self._location = None
    self._scheduler._add(self)
def __init__(self, size):
    self.not_in_cache = not_in_cache = object()

    cache = {}
    key_fifo = collections.deque([], size)

    def get(self, key):
        return cache.get(key, not_in_cache)

    def set(self, key, value):
        cache[key] = value
        if len(cache) > size:
            cache.pop(key_fifo.popleft(), None)
        key_fifo.append(key)

    def clear(self):
        cache.clear()
        key_fifo.clear()

    self.get = types.MethodType(get, self)
    self.set = types.MethodType(set, self)
    self.clear = types.MethodType(clear, self)

# argument cache for optimizing repeated calls when backtracking
# through recursive expressions
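The same dict-plus-bounded-deque FIFO cache idea as a self-contained class, for illustration (the FifoCache name and get/set signatures here are stand-ins, not the original project's API):

from collections import deque

class FifoCache:
    def __init__(self, size):
        self.size = size
        self.cache = {}
        self.key_fifo = deque([], size)   # insertion order, bounded to size

    def get(self, key, default=None):
        return self.cache.get(key, default)

    def set(self, key, value):
        self.cache[key] = value
        if len(self.cache) > self.size:
            # evict the oldest key once the cache overflows
            self.cache.pop(self.key_fifo.popleft(), None)
        self.key_fifo.append(key)

c = FifoCache(2)
c.set("a", 1); c.set("b", 2); c.set("c", 3)
print(c.get("a"), c.get("c"))   # None 3  ("a" was evicted first)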
def add(self, event, subscriber, append=True):
    """
    Add a subscriber for an event.

    :param event: The name of an event.
    :param subscriber: The subscriber to be added (and called when the
                       event is published).
    :param append: Whether to append or prepend the subscriber to an
                   existing subscriber list for the event.
    """
    subs = self._subscribers
    if event not in subs:
        subs[event] = deque([subscriber])
    else:
        sq = subs[event]
        if append:
            sq.append(subscriber)
        else:
            sq.appendleft(subscriber)
def __init__(self, cpe_str):
    """Create a new CPE object that represents the cpe_str

    :param str cpe_str: The cpe string
    """
    self.part = ""
    self.vendor = ""
    self.product = ""
    self.version = ""
    self.update = ""
    self.edition = ""

    if cpe_str.startswith("cpe:/"):
        cpe_str = cpe_str.replace("cpe:/", "")
    elif cpe_str.startswith("cpe:2.3:"):
        cpe_str = cpe_str.replace("cpe:2.3:", "")
    else:
        raise CPEException("Invalid cpe string {!r}".format(cpe_str))

    parts = deque(cpe_str.split(":"))
    to_set = deque(self.attrs)
    while len(parts) > 0 and len(to_set) > 0:
        next_attr = to_set.popleft()
        setattr(self, next_attr, parts.popleft())
def apply_delta(self, delta):
    """Apply delta to our state and return a copy of the
    affected object as it was before and after the update, e.g.:

        old_obj, new_obj = self.apply_delta(delta)

    old_obj may be None if the delta is for the creation of a new object,
    e.g. a new application or unit is deployed.

    new_obj will never be None, but may be dead (new_obj.dead == True)
    if the object was deleted as a result of the delta being applied.
    """
    history = (
        self.state
        .setdefault(delta.entity, {})
        .setdefault(delta.get_id(), collections.deque())
    )
    history.append(delta.data)
    if delta.type == 'remove':
        history.append(None)

    entity = self.get_entity(delta.entity, delta.get_id())
    return entity.previous(), entity
def get_entity(
        self, entity_type, entity_id, history_index=-1, connected=True):
    """Return an object instance for the given entity_type and id.

    By default the object state matches the most recent state from
    Juju. To get an instance of the object in an older state, pass
    history_index, an index into the history deque for the entity.
    """
    if history_index < 0 and history_index != -1:
        history_index += len(self.entity_history(entity_type, entity_id))
        if history_index < 0:
            return None

    try:
        self.entity_data(entity_type, entity_id, history_index)
    except IndexError:
        return None

    entity_class = get_entity_class(entity_type)
    return entity_class(
        entity_id, self.model, history_index=history_index,
        connected=connected)
def __init__(self, entity_id, model, history_index=-1, connected=True):
    """Initialize a new entity

    :param entity_id str: The unique id of the object in the model
    :param model: The model instance in whose object tree this
        entity resides
    :history_index int: The index of this object's state in the model's
        history deque for this entity
    :connected bool: Flag indicating whether this object gets live updates
        from the model.
    """
    self.entity_id = entity_id
    self.model = model
    self._history_index = history_index
    self.connected = connected
    self.connection = model.connection()
def __init__(self, sock=None, map=None):
    # for string terminator matching
    self.ac_in_buffer = ''

    # we use a list here rather than cStringIO for a few reasons...
    # del lst[:] is faster than sio.truncate(0)
    # lst = [] is faster than sio.truncate(0)
    # cStringIO will be gaining unicode support in py3k, which
    # will negatively affect the performance of bytes compared to
    # a ''.join() equivalent
    self.incoming = []

    # we toss the use of the "simple producer" and replace it with
    # a pure deque, which the original fifo was a wrapping of
    self.producer_fifo = deque()
    asyncore.dispatcher.__init__(self, sock, map)
def intersperse(its):
    iters = collections.deque(iter(i) for i in its)
    N = len(iters)
    idxs = collections.deque(range(N))
    rets = [None] * N
    while iters:
        try:
            i = iters.popleft()
            idx = idxs.popleft()
            yield next(i)
            idxs.append(idx)
            iters.append(i)
        except StopIteration as e:
            rets[idx] = e.value
    return tuple(rets)
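A usage sketch: intersperse round-robins across its inputs and silently drops each iterable as it runs out. (The rets tuple is only observable as the generator's StopIteration value if you drive it by hand with next().)

gen = intersperse(["ab", "cde"])
print(list(gen))   # ['a', 'c', 'b', 'd', 'e']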
def _collect_refs(self, item_val, acc=None, no_opt=False):
    if acc is None:
        acc = deque()

    from gramfuzz.fields import Opt
    if no_opt and (isinstance(item_val, Opt) or item_val.shortest_is_nothing):
        return acc

    from gramfuzz.fields import Ref
    if isinstance(item_val, Ref):
        acc.append(item_val)

    if hasattr(item_val, "values"):
        for val in item_val.values:
            self._collect_refs(val, acc)

    return acc

# --------------------------------------
# public, but intended for internal use
# --------------------------------------
def add_definition(self, cat, def_name, def_val, no_prune=False, gram_file="default"):
    """Add a new rule definition named ``def_name`` having value ``def_val``
    to the category ``cat``.

    :param str cat: The category to add the rule to
    :param str def_name: The name of the rule definition
    :param def_val: The value of the rule definition
    :param bool no_prune: If the rule should explicitly *NOT* be pruned
        even if it has been determined to be unreachable (default=``False``)
    :param str gram_file: The file the rule was defined in
        (default=``"default"``).
    """
    self._rules_processed = False

    self.add_to_cat_group(cat, gram_file, def_name)

    if no_prune:
        self.no_prunes.setdefault(cat, {}).setdefault(def_name, True)

    if self._staged_defs is not None:
        # if we're tracking changes during rule generation, add any new rules
        # to _staged_defs so they can be reverted if something goes wrong
        self._staged_defs.append((cat, def_name, def_val))
    else:
        self.defs.setdefault(cat, {}).setdefault(def_name, deque()).append(def_val)
def bfs(graph, func, head, reverse=None):
    """
    BREADTH FIRST SEARCH

    IF func RETURNS FALSE, THEN NO MORE PATHS DOWN THE BRANCH ARE TAKEN
    IT'S EXPECTED func TAKES 4 ARGUMENTS:
    node - THE CURRENT NODE IN THE GRAPH
    path - PATH FROM head TO node
    graph - THE WHOLE GRAPH
    todo - WHAT'S IN THE QUEUE TO BE DONE
    """
    todo = deque()  # LIST OF PATHS
    todo.append(Step(None, head))

    while todo:  # stop when the queue empties (a bare `while True` would raise IndexError on popleft)
        path = todo.popleft()
        keep_going = func(path.node, Path(path), graph, todo)
        if keep_going:
            todo.extend(Step(path, c) for c in graph.get_children(path.node))
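The Step and Path helpers above come from the surrounding project; here is a self-contained sketch of the same deque-driven BFS over a plain adjacency dict (all names in it are illustrative):

from collections import deque

def bfs_paths(graph, start):
    """Yield (node, path from start) pairs in breadth-first order."""
    todo = deque([(start, (start,))])
    seen = {start}
    while todo:
        node, path = todo.popleft()
        yield node, path
        for child in graph.get(node, ()):
            if child not in seen:
                seen.add(child)
                todo.append((child, path + (child,)))

graph = {"a": ["b", "c"], "b": ["d"], "c": [], "d": []}
print([node for node, _ in bfs_paths(graph, "a")])   # ['a', 'b', 'c', 'd']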
def __init__(self, args):
    self._peer_addr = (args.target_ip, 6778)
    self._transport = None
    self._send_handle = None
    self._stat_handle = None
    self._ledbat = LEDBAT()
    self._loop = asyncio.get_event_loop()
    self._next_id = 1
    self._in_flight = set()
    self._ret_control = collections.deque(5*[None], 5)
    self._start_time = None
    self._int_time = None
    self._sent_data = 0
    self._int_data = 0
    self._num_retrans = 0
    self._int_retrans = 0
    self._delays = collections.deque(10*[None], 10)
def search_node_subg(node, location_to_reads, read_to_locations, seen):
    """
    Extract the complete independent subgraph given a node, and add all
    subgraph nodes into the record.
    Returns the nodes in this subgraph and updated record.
    """
    if node in seen:
        return None, seen
    tmp_net = set()
    queue = [node]
    while len(queue) > 0:
        # set.update works in both Python 2 and 3; the original used
        # map(tmp_net.add, queue), which is a no-op under Python 3's lazy map
        tmp_net.update(queue)
        seen.update(queue)
        new_queue = deque([])
        for x in queue:
            x_reads = location_to_reads[x]
            new_queue.extend([next_node for x_read in x_reads
                              for next_node in read_to_locations[x_read]
                              if next_node not in tmp_net])
        queue = list(set(new_queue))
    subg = list(set(tmp_net))
    return subg, seen
def rewind(self):
    """Rewind this cursor to its unevaluated state.

    Reset this cursor if it has been partially or completely evaluated.
    Any options that are present on the cursor will remain in effect.
    Future iterating performed on this cursor will cause new queries to
    be sent to the server, even if the resultant data has already been
    retrieved by this cursor.
    """
    self.__data = deque()
    self.__id = None
    self.__address = None
    self.__retrieved = 0
    self.__killed = False
    return self
def _object_hook(dictionary):
    object_view = ObjectView(dictionary)
    stack = deque()
    stack.append((None, None, dictionary))
    while len(stack):
        instance, member_name, dictionary = stack.popleft()
        # items() here; the original used Python 2's iteritems()
        for name, value in dictionary.items():
            if isinstance(value, dict):
                stack.append((dictionary, name, value))
        if instance is not None:
            instance[member_name] = ObjectView(dictionary)
    return object_view
def __init__(self, *args, **kwargs):
    Connection.__init__(self, *args, **kwargs)

    self.deque = deque()
    self._deque_lock = Lock()

    self._connect_socket()
    self._socket.setblocking(0)

    with self._libevloop._lock:
        self._read_watcher = libev.IO(self._socket.fileno(), libev.EV_READ,
                                      self._libevloop._loop, self.handle_read)
        self._write_watcher = libev.IO(self._socket.fileno(), libev.EV_WRITE,
                                       self._libevloop._loop, self.handle_write)

    self._send_options_message()

    self._libevloop.connection_created(self)

    # start the global event loop if needed
    self._libevloop.maybe_start()
def __init__(self, *args, **kwargs):
    Connection.__init__(self, *args, **kwargs)

    self.deque = deque()
    self.deque_lock = Lock()

    self._connect_socket()

    asyncore.dispatcher.__init__(self, self._socket, _dispatcher_map)

    self._writable = True
    self._readable = True

    self._send_options_message()

    # start the event loop if needed
    self._loop.maybe_start()
def handle_write(self):
    while True:
        with self.deque_lock:
            try:
                next_msg = self.deque.popleft()
            except IndexError:
                self._writable = False
                return

        try:
            sent = self.send(next_msg)
            self._readable = True
        except socket.error as err:
            if err.args[0] in NONBLOCKING:
                with self.deque_lock:
                    self.deque.appendleft(next_msg)
            else:
                self.defunct(err)
            return
        else:
            if sent < len(next_msg):
                with self.deque_lock:
                    self.deque.appendleft(next_msg[sent:])
                if sent == 0:
                    return
def SLOPE(self, param):
    class Context:
        def __init__(self, N):
            self.N = N
            self.q = deque([], self.N)
            self.x = [i for i in range(self.N)]

        def handleInput(self, value):
            if len(self.q) < self.N:
                self.q.append(value)
                return 0
            self.q.append(value)
            z1 = np.polyfit(self.x, self.q, 1)
            return z1[0]

    ctx = Context(param[1])
    result = param[0].apply(ctx.handleInput)
    return result
def FORCAST(self, param):
    class Context:
        def __init__(self, N):
            self.N = N
            self.q = deque([], self.N)
            self.x = [i for i in range(self.N)]

        def handleInput(self, value):
            if len(self.q) < self.N:
                self.q.append(value)
                return np.NaN
            z1 = np.polyfit(self.x, self.q, 1)
            fn = np.poly1d(z1)
            y = fn(self.N + 1)
            self.q.append(value)
            return y

    ctx = Context(param[1])
    result = param[0].apply(ctx.handleInput)
    return result
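A usage sketch of the rolling-window pattern shared by SLOPE and FORCAST above, assuming param[0] is a pandas Series (which the .apply call implies); the Slope class name here is illustrative:

import numpy as np
import pandas as pd
from collections import deque

class Slope:
    def __init__(self, n):
        self.n = n
        self.q = deque([], n)      # window of at most the last n values
        self.x = list(range(n))

    def __call__(self, value):
        if len(self.q) < self.n:   # still filling the window
            self.q.append(value)
            return 0.0
        self.q.append(value)       # maxlen=n evicts the oldest value
        return np.polyfit(self.x, self.q, 1)[0]   # slope of the fitted line

s = pd.Series([1.0, 2.0, 4.0, 7.0, 11.0, 16.0])
print(s.apply(Slope(3)).tolist())   # [0.0, 0.0, 0.0, 2.5, 3.5, 4.5]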
def append(self, event_key, propagate):
    target = event_key.dispatch_target
    assert isinstance(target, type), \
        "Class-level Event targets must be classes."

    stack = [target]
    while stack:
        cls = stack.pop(0)
        stack.extend(cls.__subclasses__())
        if cls is not target and cls not in self._clslevel:
            self.update_subclass(cls)
        else:
            if cls not in self._clslevel:
                self._clslevel[cls] = collections.deque()
            self._clslevel[cls].append(event_key._listen_fn)

    registry._stored_in_collection(event_key, self)
def iterate(obj, opts):
    """traverse the given expression structure, returning an iterator.

    traversal is configured to be breadth-first.
    """
    # fasttrack for atomic elements like columns
    children = obj.get_children(**opts)
    if not children:
        return [obj]

    traversal = deque()
    stack = deque([obj])
    while stack:
        t = stack.popleft()
        traversal.append(t)
        for c in t.get_children(**opts):
            stack.append(c)
    return iter(traversal)
def unwrap_order_by(clause):
    """Break up an 'order by' expression into individual column-expressions,
    without DESC/ASC/NULLS FIRST/NULLS LAST"""

    cols = util.column_set()
    stack = deque([clause])
    while stack:
        t = stack.popleft()
        if isinstance(t, ColumnElement) and (
            not isinstance(t, UnaryExpression) or
            not operators.is_ordering_modifier(t.modifier)
        ):
            if isinstance(t, _label_reference):
                t = t.element

            if isinstance(t, (_textual_label_reference)):
                continue

            cols.add(t)
        else:
            for c in t.get_children():
                stack.append(c)
    return cols
def dispatch_repr(self, obj, recursive):
    if obj is helper:
        return u'<span class="help">%r</span>' % helper
    if isinstance(obj, (integer_types, float, complex)):
        return u'<span class="number">%r</span>' % obj
    if isinstance(obj, string_types):
        return self.string_repr(obj)
    if isinstance(obj, RegexType):
        return self.regex_repr(obj)
    if isinstance(obj, list):
        return self.list_repr(obj, recursive)
    if isinstance(obj, tuple):
        return self.tuple_repr(obj, recursive)
    if isinstance(obj, set):
        return self.set_repr(obj, recursive)
    if isinstance(obj, frozenset):
        return self.frozenset_repr(obj, recursive)
    if isinstance(obj, dict):
        return self.dict_repr(obj, recursive)
    if deque is not None and isinstance(obj, deque):
        return self.deque_repr(obj, recursive)
    return self.object_repr(obj)
def splitby(pred, seq):
    trues = deque()
    falses = deque()
    iseq = iter(seq)

    def pull(source, pred, thisval, thisbuf, otherbuf):
        while 1:
            while thisbuf:
                yield thisbuf.popleft()
            try:
                newitem = next(source)
            except StopIteration:
                # under PEP 479 a bare StopIteration escaping a generator
                # becomes RuntimeError, so end the generator explicitly
                return
            # uncomment next line to show that source is processed only once
            # print("pulled", newitem)
            if pred(newitem) == thisval:
                yield newitem
            else:
                otherbuf.append(newitem)

    true_iter = pull(iseq, pred, True, trues, falses)
    false_iter = pull(iseq, pred, False, falses, trues)
    return true_iter, false_iter
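A usage sketch: splitby returns two lazy iterators over a single pass of the source; items that belong to the other side are parked in its deque until that iterator is consumed:

evens, odds = splitby(lambda n: n % 2 == 0, range(10))
print(list(evens))   # [0, 2, 4, 6, 8]  (odds get buffered as a side effect)
print(list(odds))    # [1, 3, 5, 7, 9]  (served from the buffer; source already exhausted)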
def __init__(self, env, is_batch_norm):
    self.env = env
    self.num_states = env.observation_space.shape[0]
    self.num_actions = env.action_space.shape[0]

    if is_batch_norm:
        self.critic_net = CriticNet_bn(self.num_states, self.num_actions)
        self.actor_net = ActorNet_bn(self.num_states, self.num_actions)
    else:
        self.critic_net = CriticNet(self.num_states, self.num_actions)
        self.actor_net = ActorNet(self.num_states, self.num_actions)

    # Initialize Buffer Network:
    self.replay_memory = deque()

    # Initialize time step:
    self.time_step = 0
    self.counter = 0

    action_max = np.array(env.action_space.high).tolist()
    action_min = np.array(env.action_space.low).tolist()
    action_bounds = [action_max, action_min]
    self.grad_inv = grad_inverter(action_bounds)
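The deque() above is unbounded; a common variant (a sketch, not this project's code) bounds the replay buffer with maxlen and samples minibatches from it:

import random
from collections import deque

replay_memory = deque(maxlen=10000)   # oldest transitions evicted automatically

def remember(state, action, reward, next_state, done):
    replay_memory.append((state, action, reward, next_state, done))

def sample_batch(batch_size):
    # random.sample works directly on a deque (it is a sized sequence)
    return random.sample(replay_memory, batch_size)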
def __init__(self, ctx, *args):
    self.ctx = ctx
    self.ServiceName = "com.sun.star.linguistic2.Proofreader"
    self.ImplementationName = "org.openoffice.comp.pyuno.Lightproof." + gce.pkg
    self.SupportedServiceNames = (self.ServiceName, )
    self.locales = []
    for i in gce.locales:
        l = gce.locales[i]
        self.locales.append(Locale(l[0], l[1], l[2]))
    self.locales = tuple(self.locales)
    xCurCtx = uno.getComponentContext()
    # init
    gce.load()
    # GC options
    # opt_handler.load(xCurCtx)
    dOpt = Options.load(xCurCtx)
    gce.setOptions(dOpt)
    # store for results of big paragraphs
    self.dResult = {}
    self.nMaxRes = 1500
    self.lLastRes = deque(maxlen=self.nMaxRes)
    self.nRes = 0

# XServiceName method implementations
def level_order(tree, include_all=False):
    """
    Returns an iterator over the tree in level-order

    If include_all is set to True, empty parts of the tree are filled
    with dummy entries and the iterator becomes infinite.
    """
    q = deque()
    q.append(tree)
    while q:
        node = q.popleft()
        yield node
        if include_all or node.left:
            q.append(node.left or node.__class__())
        if include_all or node.right:
            q.append(node.right or node.__class__())
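A usage sketch for level_order, with a minimal stand-in Node class (the real tree class only needs left and right attributes plus a no-argument constructor for the include_all case):

from collections import deque

class Node:
    def __init__(self, key=None, left=None, right=None):
        self.key = key
        self.left = left
        self.right = right

#     1
#    / \
#   2   3
root = Node(1, Node(2), Node(3))
print([n.key for n in level_order(root)])   # [1, 2, 3]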
def __init__(
    self,
    normalization_parameters,
    parameters,
):
    self._quantile_states = collections.deque(
        maxlen=parameters.action_budget.window_size
    )
    self._quantile = 100 - parameters.action_budget.action_limit
    self.quantile_value = 0
    self._limited_action = np.argmax(
        np.array(parameters.actions) == parameters.action_budget.limited_action
    )
    self._discount_factor = parameters.rl.gamma
    self._quantile_update_rate = \
        parameters.action_budget.quantile_update_rate
    self._quantile_update_frequency = \
        parameters.action_budget.quantile_update_frequency
    self._update_counter = 0
    super(self.__class__, self).__init__(normalization_parameters, parameters)
    self._max_q = parameters.rl.maxq_learning
def rtask_avg_proc(channel, threshold, trend_task, window_size, task=None):
    import collections

    # subscribe to channel (at client)
    yield channel.subscribe(task)
    # create circular buffer
    data = collections.deque(maxlen=window_size)
    for i in range(window_size):
        data.append(0.0)
    cumsum = 0.0
    # first message is 'start' command; see 'client_proc'
    assert (yield task.receive()) == 'start'
    while True:
        i, n = yield task.receive()
        if n is None:
            break
        cumsum += (n - data[0])
        avg = (cumsum / window_size)
        if avg > threshold:
            trend_task.send((i, 'high', float(avg)))
        elif avg < -threshold:
            trend_task.send((i, 'low', float(avg)))
        data.append(n)
    raise StopIteration(0)

# This generator function is sent to remote dispycos process to save
# the received data in a file (on the remote peer).
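The same incremental moving-average arithmetic, extracted into a plain generator for illustration (no pycos machinery assumed). Because the deque is bounded, data[0] is always the value that the next append will evict, so the running sum can be updated in O(1):

from collections import deque

def moving_average(values, window_size):
    """Yield the mean of the last window_size values, zero-padded at the start."""
    data = deque(window_size * [0.0], window_size)
    cumsum = 0.0
    for n in values:
        cumsum += n - data[0]   # subtract the value about to be evicted
        data.append(n)
        yield cumsum / window_size

print(list(moving_average([1, 2, 3, 4], 2)))   # [0.5, 1.5, 2.5, 3.5]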
def __init__(self, task):
    """Categorize messages to task 'task'.
    """
    self._task = task
    self._categories = {None: collections.deque()}
    self._categorize = []
def receive(self, category=None, timeout=None, alarm_value=None):
    """Similar to 'receive' of Task, except it retrieves (waiting, if
    necessary) messages in given 'category'.
    """
    # assert Pycos.cur_task() == self._task
    c = self._categories.get(category, None)
    if c:
        msg = c.popleft()
        raise StopIteration(msg)
    if timeout:
        start = _time()
    while 1:
        msg = yield self._task.receive(timeout=timeout, alarm_value=alarm_value)
        if msg == alarm_value:
            raise StopIteration(msg)
        for categorize in self._categorize:
            c = categorize(msg)
            if c == category:
                raise StopIteration(msg)
            if c is not None:
                bucket = self._categories.get(c, None)
                if not bucket:
                    bucket = self._categories[c] = collections.deque()
                bucket.append(msg)
                break
        else:
            self._categories[None].append(msg)
        if timeout:
            now = _time()
            timeout -= now - start
            start = now
def ntscInitPrev():
    global ntscOutput
    # range() here; the original used Python 2's xrange()
    ntscOutput = deque([(128, 128, 128) for p in range(4)])
def make_names_deque():
    names_deque = deque()
    with open("yob2015.txt") as f:
        reader = csv.DictReader(f, fieldnames=fields)
        for row in reader:
            names_deque.appendleft(row["name"])
    return names_deque

# 101 ms