我们从 Python 开源项目中,提取了以下 18 个代码示例,用于说明如何使用 asyncio.PriorityQueue()。
def __init__(
    self,
    connections,
    dead_timeout=60,
    timeout_cutoff=5,
    selector_class=RoundRobinSelector,
    randomize_hosts=True,
    *,
    loop,
    **kwargs
):
    """Set up the pool from ``(connection, options)`` pairs.

    Dead connections are parked in a bounded priority queue sized to the
    number of connections, so it can never overflow.
    """
    self._dead_timeout = dead_timeout
    self.timeout_cutoff = timeout_cutoff
    self.connection_opts = connections
    self.connections = [conn for (conn, _) in connections]
    self.orig_connections = set(self.connections)
    # NOTE(review): the ``loop`` argument to asyncio queues was removed in
    # Python 3.10 — this snippet targets an older asyncio.
    self.dead = asyncio.PriorityQueue(len(self.connections), loop=loop)
    self.dead_count = collections.Counter()
    self.loop = loop
    if randomize_hosts:
        # Shuffle so multiple clients do not all hammer the first host.
        random.shuffle(self.connections)
    self.selector = selector_class(dict(connections))
async def using_queues():
    """Demonstrate the three asyncio queue flavours.

    Fixed: the original was declared with a plain ``def`` while using
    ``await``, which is a SyntaxError — a function that awaits must be a
    coroutine (``async def``).
    """
    q = asyncio.Queue()
    q.put_nowait('Hello')
    await q.get()
    await q.put('world')
    q.get_nowait()
    pq = asyncio.PriorityQueue()     # retrieves lowest-valued entries first
    stack = asyncio.LifoQueue()      # retrieves most recently added first
def test_order(self):
    """Items must come out of a PriorityQueue in ascending order.

    Fixed: ``asyncio.PriorityQueue(loop=self.loop)`` — the ``loop``
    argument was deprecated in Python 3.8 and removed in 3.10.  The queue
    is only used synchronously here (``put_nowait``/``get_nowait``), so no
    event loop is needed at all.
    """
    q = asyncio.PriorityQueue()
    for i in [1, 3, 2]:
        q.put_nowait(i)
    items = [q.get_nowait() for _ in range(3)]
    self.assertEqual([1, 2, 3], items)
def __init__(self, loop=None, external_cb=None):
    """Create the task dispatcher.

    ``external_cb`` is kept only when it is callable; it receives two
    positional arguments, the task result and the task exception.
    """
    # NOTE(review): asyncio._get_running_loop() is a private API — confirm
    # the target Python version still exposes it.
    self.loop = loop or asyncio._get_running_loop()
    self.tasks_queue = asyncio.PriorityQueue()
    self.results_queue = asyncio.Queue(maxsize=10)
    self._process_next = asyncio.Event()
    self._dispatcher_task = None
    # Pass cb weakref to prevent gc in some cases and let dispatcher finish
    # all tasks.  Cb takes 2 positional arguments: task result and task
    # exception.
    self._external_cb = external_cb if callable(external_cb) else None
def __init__(self, settings):
    """Initialise the work queue and its bookkeeping counters.

    ``settings`` is accepted but not stored by this constructor.
    """
    self._total_queued = 0
    self._exceptions = False
    self._queue = asyncio.PriorityQueue()
def __init__(self, fetcher, parser, saver, url_filter=None, loop=None):
    """Construct the pool on top of BasePool.

    Falls back to ``asyncio.get_event_loop()`` when no loop is supplied.
    """
    BasePool.__init__(self, fetcher, parser, saver, url_filter=url_filter)
    self._loop = loop or asyncio.get_event_loop()
    # Queue entries are (priority, url, keys, deep, repeat) tuples.
    self._queue = asyncio.PriorityQueue(loop=self._loop)
    self._start_time = None       # start time of this pool
    self._running_tasks = 0       # count of currently running tasks
    return
def __init__(self, bot): self.bot = bot self.events = fileIO('data/scheduler/events.json', 'load') self.queue = asyncio.PriorityQueue(loop=self.bot.loop) self.queue_lock = asyncio.Lock() self.to_kill = {} self._load_events()
def __init__(self) -> None:
    """Create the machine with its message queue not yet built.

    Despite the annotation, ``_messages`` starts as ``None`` and is only
    replaced with a real queue later — treat it as optional until then.
    """
    self._messages: asyncio.PriorityQueue = None
def _init_asyncio(self):
    """Pick (or create) an event loop and bind the message queue to it."""
    # NOTE(review): asyncio._get_running_loop() is a private API, and the
    # ``loop`` argument to PriorityQueue was removed in Python 3.10 —
    # this snippet targets an older interpreter.
    if asyncio._get_running_loop() is None:  # type: ignore
        self._loop = asyncio.new_event_loop()
    else:
        self._loop = asyncio.get_event_loop()
    self._messages = asyncio.PriorityQueue(loop=self._loop)
def __init__(self, sub: List[MachineBase]=List(), parent=None, name=None, debug=False) -> None:
    """Set up a modular machine with optional sub-machines.

    NOTE(review): ``List()`` as a default is presumably an immutable
    project type (not a builtin list) — confirm, since a mutable default
    would be shared across instances.
    """
    self.sub = sub
    self.debug = debug
    self.data = None
    # Queue is created later; despite the annotation it starts as None.
    self._messages: asyncio.PriorityQueue = None
    self.message_log = List()
    ModularMachine.__init__(self, parent, name=name)
def __init__(self, loop):
    """Initialise the crawl queue and per-host rate-limiting state.

    Fixed: ``maxhostqps``, ``delta_t`` and ``remaining_url_budget`` were
    first assigned ``None`` and then unconditionally overwritten a few
    lines later — the dead stores are removed.
    """
    self.q = asyncio.PriorityQueue(loop=loop)
    self.ridealong = {}
    self.awaiting_work = 0
    # 10 seconds good enough for QPS=0.1 and up
    self.next_fetch = cachetools.ttl.TTLCache(10000, 10)
    # 10 seconds is longer than our typical delay
    self.frozen_until = cachetools.ttl.TTLCache(10000, 10)
    self.maxhostqps = float(config.read('Crawl', 'MaxHostQPS'))
    self.delta_t = 1. / self.maxhostqps
    # 0 => None (no budget limit)
    self.remaining_url_budget = int(config.read('Crawl', 'MaxCrawledUrls') or 0) or None
def load(self, crawler, f):
    """Restore scheduler state from a pickle checkpoint file ``f``.

    Reads, in order: header, ridealong map, crawler seeds, a work-item
    count, then that many work items which are replayed into a fresh
    priority queue.

    SECURITY NOTE(review): ``pickle.load`` on an untrusted file can run
    arbitrary code — only load checkpoints this program produced.
    """
    header = pickle.load(f)
    # XXX check that this is a good header... log it
    self.ridealong = pickle.load(f)
    crawler._seeds = pickle.load(f)
    self.q = asyncio.PriorityQueue()
    count = pickle.load(f)
    for _ in range(count):
        self.q.put_nowait(pickle.load(f))
def __init__(self, bot):
    """Set up per-guild music playback state for ``bot``."""
    self.current = None
    self.voice = None
    self.bot = bot
    self.play_next_song = asyncio.Event()
    self.skip_votes = set()      # a set of user_ids that voted
    self.audio_player = self.bot.loop.create_task(self.audio_player_task())
    self.playerheat = {}         # keep track of how often each user requests
    self.queue = []              # easily track the songs without messing with threads
    # Choose queue flavour from the bot's configuration.
    if self.bot.music_priorityqueue:
        self.songs = asyncio.PriorityQueue()   # gotta keep priority
    else:
        self.songs = asyncio.Queue()
    asyncio.get_event_loop().create_task(self.loop_cooldown())
def __init__(self, **kwargs):
    """Initialise the parent class and create the pending-entry queue.

    Fixed: ``super().__init(**kwargs)`` — the method name was missing its
    trailing underscores, so it raised AttributeError at runtime instead
    of calling the parent constructor.
    """
    super().__init__(**kwargs)
    self._pending = asyncio.PriorityQueue()
    # We have to override _restart as well because _get removes the entry.
def __init__(self, connections, *, dead_timeout=60, timeout_cutoff=5,
             selector_factory=RoundRobinSelector, loop):
    """Build a pool over ``connections`` with dead-host tracking."""
    self._dead_timeout = dead_timeout
    self._timeout_cutoff = timeout_cutoff
    self._selector = selector_factory(None)
    # One slot per connection, so the dead-queue can never overflow.
    # NOTE(review): the ``loop`` argument to asyncio queues was removed in
    # Python 3.10 — this snippet targets an older asyncio.
    self._dead = asyncio.PriorityQueue(len(connections), loop=loop)
    self._dead_count = collections.Counter()
    self._connections = connections
    self._loop = loop
def build(tasks: Dict[Source, Task], dry: bool = False, njobs: int = 1) -> None:
    """Build tasks.  This is the main entry point.

    Hashes every source, compares against the cached hashes, and — unless
    nothing changed or ``dry`` is set — runs ``njobs`` workers over a
    priority queue until the scheduler completes.

    NOTE(review): this snippet's original indentation was lost; the cache
    dump is placed inside ``finally`` so partial progress is persisted
    even when compilation fails — confirm against the upstream project.
    """
    print('Scanning files...')
    tree = get_tree(tasks)
    # Load the previous hash cache; treat a missing/corrupt file as empty.
    try:
        with open(cachefile) as f:
            hashes: Dict[Filename, Hash] = json.load(f)['hashes']
    except (ValueError, FileNotFoundError):
        hashes = {}
    changed_files = [src for src in tasks if tree.hashes[src] != hashes.get(src)]
    print(f'Changed files: {len(changed_files)}/{len(tasks)}.')
    if not changed_files or dry:
        return
    task_queue: TaskQueue = PriorityQueue()
    result_queue: ResultQueue = Queue()
    loop = asyncio.get_event_loop()
    workers = [
        loop.create_task(worker(task_queue, result_queue))
        for _ in range(njobs)
    ]
    try:
        loop.run_until_complete(
            scheduler(tasks, task_queue, result_queue, tree, hashes, changed_files)
        )
    except CompilationError as e:
        print(f'error: Compilation of {e.source} returned {e.retcode}.')
        raise
    except:
        # Re-raised immediately — only emits a newline before propagating.
        print()
        raise
    else:
        print()
    finally:
        for tsk in workers:
            tsk.cancel()
        with open(cachefile, 'w') as f:
            json.dump({'hashes': hashes}, f)
        if DEBUG:
            print_clocks()