The following 50 code examples, extracted from open-source Python projects, illustrate how to use heapq.heappop().
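For reference, heapq.heappop() removes and returns the smallest item from a list that is maintained as a heap. A minimal standalone sketch (the helper name smallest_k is chosen here purely for illustration):

import heapq

def smallest_k(values, k):
    # Turn the list into a heap in place, then pop the k smallest items
    # in ascending order; heappop always returns the current minimum.
    heap = list(values)
    heapq.heapify(heap)
    return [heapq.heappop(heap) for _ in range(min(k, len(heap)))]

# Example: smallest_k([5, 1, 9, 3], 2) returns [1, 3].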
def kNN_entity(self, vec, topk=10, method=0, self_vec_id=None):
    q = []
    for i in range(len(self.vec_e)):
        # skip self
        if self_vec_id != None and i == self_vec_id:
            continue
        if method == 1:
            dist = SP.distance.cosine(vec, self.vec_e[i])
        else:
            dist = LA.norm(vec - self.vec_e[i])
        if len(q) < topk:
            HP.heappush(q, self.index_dist(i, dist))
        else:
            # indeed it fetches the biggest
            tmp = HP.nsmallest(1, q)[0]
            if tmp.dist > dist:
                HP.heapreplace(q, self.index_dist(i, dist))
    rst = []
    while len(q) > 0:
        item = HP.heappop(q)
        rst.insert(0, (self.vocab_e[self.vec2e[item.index]], item.dist))
    return rst

# given entity name, find kNN
def update_evaluations(formatter,         # type: CodeFormatter
                       evaluations,       # type: List[AttemptResult]
                       finished_styles,   # type: List[AttemptResult]
                       bestdist           # type: Sequence[int]
                       ):
    # type: (...) -> Tuple[bool, bool, Sequence[int]]
    attemptresult = heapq.heappop(evaluations)
    nested_round = False
    if bestdist is None or (distquality(attemptresult.distance) < distquality(bestdist)):
        bestdist = attemptresult.distance
        heapq.heappush(evaluations, attemptresult)
    else:
        # We found a style that could no longer be improved by adding a single option value.
        heapq.heappush(finished_styles, attemptresult)
        nested_styles = formatter.nested_derivations(attemptresult.formatstyle)
        if not nested_styles:
            # This formatstyle does not unlock more options.
            return True, nested_round, bestdist
        # Restart the optimization from scratch with the attemptresult augmented with
        # every nested option as seed styles.
        bestdist = None
        ndist = (HUGE_DISTANCE, HUGE_DISTANCE, HUGE_DISTANCE, HUGE_DISTANCE)
        evaluations[:] = [AttemptResult(ndist, s) for s in nested_styles]
        nested_round = True
    return False, nested_round, bestdist
def __next__(self):
    if not self._pq:
        raise StopIteration
    pair = heapq.heappop(self._pq)
    t = pair.x
    for i in range(self._n):
        v = t[i] + 1
        tp = t[:i] + (v,) + t[i + 1:]
        if v > self.max_sizes[i]:
            if i not in self.continuation:
                self.continuation[i] = tp
            continue
        if tp not in self._memo:
            pair = PairGen._Pair(*tp)
            heapq.heappush(self._pq, pair)
            self._memo[tp] = True
    return t
def need_processing(self):
    """A utility to help determine which connections need processing.

    Returns a triple of lists containing those connections that
    0) need to read from the network, 1) need to write to the network,
    2) waiting for pending timers to expire. The timer list is sorted
    with the connection next expiring at index 0.
    """
    readers = []
    writers = []
    timer_heap = []
    for c in iter(self._connections.values()):
        if c.needs_input > 0:
            readers.append(c)
        if c.has_output > 0:
            writers.append(c)
        if c.deadline:
            heapq.heappush(timer_heap, (c.next_tick, c))
    timers = []
    while timer_heap:
        x = heapq.heappop(timer_heap)
        timers.append(x[1])
    return (readers, writers, timers)
def pop(self):
    _, smallest = heapq.heappop(self.heap)
    self._remove_from_dict(smallest)
    return smallest
def dijkstra(adj, costs, s, t):
    ''' Return predecessors and min distance if there exists a shortest path
        from s to t; Otherwise, return None '''
    Q = []      # priority queue of items; note item is mutable.
    d = {s: 0}  # vertex -> minimal distance
    Qd = {}     # vertex -> [d[v], parent_v, v]
    p = {}      # predecessor
    visited_set = set([s])

    for v in adj.get(s, []):
        d[v] = costs[s, v]
        item = [d[v], s, v]
        heapq.heappush(Q, item)
        Qd[v] = item

    while Q:
        print Q
        cost, parent, u = heapq.heappop(Q)
        if u not in visited_set:
            print 'visit:', u
            p[u] = parent
            visited_set.add(u)
            if u == t:
                return p, d[u]
            for v in adj.get(u, []):
                if d.get(v):
                    if d[v] > costs[u, v] + d[u]:
                        d[v] = costs[u, v] + d[u]
                        Qd[v][0] = d[v]  # decrease key
                        Qd[v][1] = u     # update predecessor
                        heapq._siftdown(Q, 0, Q.index(Qd[v]))
                else:
                    d[v] = costs[u, v] + d[u]
                    item = [d[v], u, v]
                    heapq.heappush(Q, item)
                    Qd[v] = item

    return None
def _merge(sorted_iterables):
    """
    >>> list(_merge([(1, 2, 3), (0, 2, 4)]))
    [0, 1, 2, 2, 3, 4]
    """
    heap = []
    for iterable in sorted_iterables:
        iterator = iter(iterable)
        for value in iterator:
            heapq.heappush(heap, (value, iterator))
            break
    while heap:
        value, iterator = heapq.heappop(heap)
        yield value
        for value in iterator:
            heapq.heappush(heap, (value, iterator))
            break
def imerge(iterables):
    _hpop, _hreplace, _Stop = (heappop, heapreplace, StopIteration)
    h = []
    h_append = h.append
    for itnum, it in enumerate(map(iter, iterables)):
        try:
            nx = it.next
            h_append([nx(), itnum, nx])
        except _Stop:
            pass
    heapify(h)
    while 1:
        try:
            while 1:
                v, itnum, nx = s = h[0]
                yield v
                s[0] = nx()
                _hreplace(h, s)
        except _Stop:
            _hpop(h)
        except IndexError:
            return
def kNN_relation(self, vec, topk=10, method=0, self_vec_id=None):
    q = []
    for i in range(len(self.vec_r)):
        # skip self
        if self_vec_id != None and i == self_vec_id:
            continue
        if method == 1:
            dist = SP.distance.cosine(vec, self.vec_r[i])
        else:
            dist = LA.norm(vec - self.vec_r[i])
        if len(q) < topk:
            HP.heappush(q, self.index_dist(i, dist))
        else:
            # indeed it fetches the biggest
            tmp = HP.nsmallest(1, q)[0]
            if tmp.dist > dist:
                HP.heapreplace(q, self.index_dist(i, dist))
    rst = []
    while len(q) > 0:
        item = HP.heappop(q)
        rst.insert(0, (self.vocab_r[self.vec2r[item.index]], item.dist))
    return rst

# given relation name, find kNN
def process(self):
    """
    Finds one of the shortest paths between the start cell and the end cell.
    """
    heappush(self.open_list, (self.start.f, self.start))
    while len(self.open_list):
        f, cell = heappop(self.open_list)
        self.closed_list.add(cell)
        if cell is self.end:
            break
        neighbor_cells = self.get_neighbor_cells(cell)
        for n_cell in neighbor_cells:
            if n_cell.not_obstacle and n_cell not in self.closed_list:
                if (n_cell.f, n_cell) in self.open_list:
                    if n_cell.g > cell.g + 10:
                        self.update_cell(n_cell, cell)
                else:
                    self.update_cell(n_cell, cell)
                    heappush(self.open_list, (n_cell.f, n_cell))
    return self.display_path()
def rebalance_heaps():
    """Ensures that no element in the upper half is less than any element in the lower half."""
    if not (len(lower_half_max_heap) >= 1 and len(upper_half_min_heap) >= 1):
        return  # Don't need to rebalance if one of them is empty.
    max_from_lower_half = lower_half_max_heap[0] * -1
    min_from_upper_half = upper_half_min_heap[0]
    if min_from_upper_half < max_from_lower_half:
        # Upper half has smaller elements than that of lower half.
        while min_from_upper_half < max_from_lower_half:
            # Swap elements.
            max_from_lower_half = heapq.heappop(lower_half_max_heap) * -1
            min_from_upper_half = heapq.heappop(upper_half_min_heap)
            heapq.heappush(lower_half_max_heap, min_from_upper_half * -1)
            heapq.heappush(upper_half_min_heap, max_from_lower_half)
            # heapq will handle restructuring heap so smallest values will be at index 0.
            max_from_lower_half = lower_half_max_heap[0] * -1
            min_from_upper_half = upper_half_min_heap[0]
def dijkstra(graph, source):
    pq = []
    nodes = graph.get_all_vertices()
    distances = [math.inf] * len(nodes)
    path = [-1] * len(nodes)
    distances[source.index] = 0
    for node in nodes:
        # Store as (priority, task) tuples, heapq will sort on first element.
        heapq.heappush(pq, (distances[node.index], node))
    while pq:
        # Assumes non negative weights, so when popping a node it is the best way to get there.
        dist, node = heapq.heappop(pq)
        for edge in graph.get_adjacent_edges(node):
            # Note: can't terminate early and do this.
            # Eg: (s) -3-> (c) -12-> (d)
            #      \-20-> (d)          will be wrong
            # if distances[edge.destination.index] != math.inf:
            #     # We already have the shortest path to this node.
            #     continue
            if relax(node, edge.destination, edge, distances):
                # Found a better way to get to a next node, add that to the pq and set the parent.
                heapq.heappush(pq, (distances[edge.destination.index], edge.destination))
                path[edge.destination.index] = node.index
    return distances, path  # Shortest path from source to any other node in distances.
def dijkstra(s, t, g):
    q = []
    dis = collections.defaultdict(lambda: 0x7fffffff)  # [0x7fffffff] * len(g)
    vis = collections.defaultdict(bool)                # [False] * len(g)
    dis[s] = 0
    heapq.heappush(q, Node(s, 0))
    while q:
        cur = heapq.heappop(q).to
        if vis[cur]:
            continue
        vis[cur] = True
        for to, val in g[cur]:
            if not vis[to] and dis[cur] + val < dis[to]:
                dis[to] = dis[cur] + val
                heapq.heappush(q, Node(to, dis[to]))
    return dis
def huffman(freq):
    """Huffman code

    :param freq: dictionary with frequencies for each item
    :returns: dictionary with binary code string for each item
    :complexity: O(n log n)
    """
    h = []
    for a in freq:
        heappush(h, (freq[a], a))
    while len(h) > 1:
        (fl, l) = heappop(h)
        (fr, r) = heappop(h)
        heappush(h, (fl + fr, [l, r]))
    code = {}
    extract(code, h[0][1])
    return code
def addNum(self, num):
    """
    Adds a num into the data structure.
    :type num: int
    :rtype: void
    """
    left = self.left
    right = self.right
    if self.median is None:
        self.median = num
        return
    if num <= self.median:
        heapq.heappush(left, -num)
    else:
        heapq.heappush(right, num)
    if len(left) > len(right) + 1:
        top = -heapq.heappop(left)
        heapq.heappush(right, self.median)
        self.median = top
    if len(right) > len(left) + 1:
        top = heapq.heappop(right)
        heapq.heappush(left, -self.median)
        self.median = top
def getNewsFeed(self, userId):
    """
    Retrieve the 10 most recent tweet ids in the user's news feed. Each item in the
    news feed must be posted by users who the user followed or by the user herself.
    Tweets must be ordered from most recent to least recent.
    :type userId: int
    :rtype: List[int]
    """
    ret = []
    heap = []
    if self.tweets[userId]:
        heapq.heappush(heap, self.tweets[userId][-1])
    for followeeId in self.friendship[userId]:
        if self.tweets[followeeId]:
            heapq.heappush(heap, self.tweets[followeeId][-1])
    cnt = 10
    while heap and cnt > 0:
        _, tid, uid, idx = heapq.heappop(heap)
        ret.append(tid)
        if idx > 0:
            heapq.heappush(heap, self.tweets[uid][idx - 1])
        cnt -= 1
    return ret
def kthSmallest(self, matrix, k):
    """
    :type matrix: List[List[int]]
    :type k: int
    :rtype: int
    """
    visited = {(0, 0)}
    heap = [(matrix[0][0], (0, 0))]

    while heap:
        val, (i, j) = heapq.heappop(heap)
        k -= 1
        if k == 0:
            return val
        if i + 1 < len(matrix) and (i + 1, j) not in visited:
            heapq.heappush(heap, (matrix[i + 1][j], (i + 1, j)))
            visited.add((i + 1, j))
        if j + 1 < len(matrix) and (i, j + 1) not in visited:
            heapq.heappush(heap, (matrix[i][j + 1], (i, j + 1)))
            visited.add((i, j + 1))
def mergeKLists(self, lists):
    """
    :type lists: List[ListNode]
    :rtype: ListNode
    """
    heap = []
    p = dummy = ListNode(-1)
    for i in xrange(0, len(lists)):
        node = lists[i]
        if not node:
            continue
        heapq.heappush(heap, (node.val, node))
    while heap:
        value, node = heapq.heappop(heap)
        p.next = node
        p = p.next
        if node.next:
            node = node.next
            heapq.heappush(heap, (node.val, node))
    return dummy.next
def basic_usage():
    h = []
    heapq.heappush(h, 10)
    print h
    heapq.heappush(h, 1)
    print h
    heapq.heappush(h, 0)
    print h
    heapq.heappush(h, 3)
    print h
    heapq.heappush(h, 6)
    print h
    heapq.heappush(h, 7)
    print h
    heapq.heappush(h, 11)
    print h
    heapq.heappush(h, 9)
    print h
    heapq.heappush(h, 7)
    print h
    heapq.heappush(h, 2)
    print h
    p = heapq.heappop(h)
    print h
    print p
def heatmap(self, source_x, source_y):
    log.debug("Updating Heatmap.")
    for tile in self.grid.values():
        tile.value = sys.maxsize
        tile.previous = None
        tile.visited = False
    l_open = []
    source = self.lookup(source_x, source_y)
    if source:
        l_open.append((0, source, source))  # (value, cell, previous)
    while l_open:
        value, cell, previous = heapq.heappop(l_open)
        if cell.visited:
            continue
        cell.visited = True
        cell.previous = previous
        cell.value = value
        for x, y in self.get_neighbor_addrs(cell.x, cell.y):
            c = self.lookup(x, y)
            if c and not (c.visited or c.blocked):
                heapq.heappush(l_open, (value + 1, c, cell))
    return self.render(0, self.width, 0, self.height, heatp=True)
def main_loop(self):
    while True:
        self.took_turn = False
        self.timer, next_actor = heapq.heappop(self.event_queue)
        if isinstance(next_actor, Player):
            while True:
                self.draw_screen()
                try:
                    c = self.main.getch()
                    msg = self.keybindings[c]["function"](**self.keybindings[c]["args"])
                except KeyError:
                    continue
                else:
                    if msg:
                        self.add_message(msg)
                    self.add_event(next_actor)
                    self.current_level.heatmap(self.player.x, self.player.y)
                    break
        else:
            msg = next_actor.take_turn(self.current_level)
            if msg:
                if msg == "Game over.":
                    self.save_game()
                self.msg_handler.new_message(Message(msg))
            self.add_event(next_actor)
def dijkstra(graph, s):
    n = len(graph.keys())
    dist = dict()
    Q = list()

    for v in graph:
        dist[v] = inf
    dist[s] = 0

    heappush(Q, (dist[s], s))
    while Q:
        d, u = heappop(Q)
        if d < dist[u]:
            dist[u] = d
        for v in graph[u]:
            if dist[v] > dist[u] + graph[u][v]:
                dist[v] = dist[u] + graph[u][v]
                heappush(Q, (dist[v], v))

    return dist
def median_stream_ints(arr):
    less_than_med = []
    greater_than_med = []
    for i in range(len(arr)):
        print 'Median after adding:', arr[i]
        if len(less_than_med) == 0 and len(greater_than_med) == 0:
            heapq.heappush(less_than_med, -arr[i])  # max-heap in Python implemented by negation.
        elif arr[i] < abs(less_than_med[0]):
            heapq.heappush(less_than_med, -arr[i])
            if len(less_than_med) - len(greater_than_med) > 1:
                root = abs(heapq.heappop(less_than_med))
                heapq.heappush(greater_than_med, root)
        elif len(greater_than_med) == 0 or arr[i] > greater_than_med[0]:
            heapq.heappush(greater_than_med, arr[i])
            if len(greater_than_med) - len(less_than_med) > 1:
                root = heapq.heappop(greater_than_med)
                heapq.heappush(less_than_med, -root)
        if len(less_than_med) == len(greater_than_med):
            print (greater_than_med[0] - less_than_med[0]) / 2
        else:
            print abs(less_than_med[0]) if len(less_than_med) > len(greater_than_med) else greater_than_med[0]
def __next__(self):
    try:
        self.dt = advance_iterator(self.gen)
    except StopIteration:
        if self.genlist[0] is self:
            heapq.heappop(self.genlist)
        else:
            self.genlist.remove(self)
            heapq.heapify(self.genlist)
def __iter__(self):
    """ Sort the results by Whoosh rank; relevance. """
    _iter = super(QueryProxy, self).__iter__()

    if self._whoosh_results is None or self._order_by is not False:
        return _iter

    ordered = []
    for row in _iter:
        # we have to convert the primary-key, as stored in the SQL database
        # into a string because the key is stored as an `ID` in whoosh.
        # The ID field is string only; plus, this allows for uuid pk's.
        str_pk = str(getattr(row, self._pk))
        heapq.heappush(ordered, (self._whoosh_results[str_pk], row))

    def inner():
        while ordered:
            yield heapq.heappop(ordered)[1]

    return inner()
def _create_clusterings(self, dendrogram):
    clusterings = [[(-(dendrogram.distance), dendrogram)]]
    for threshold in np.linspace(-self._max_dist, -self._min_dist, self.number_of_steps)[1:]:
        new_clustering = clusterings[-1][:]  # set new clustering to be equivalent to previous
        # Expand previous clustering
        while new_clustering[0][0] < threshold and new_clustering[0][1].is_cluster():
            expanded_cluster = heapq.heappop(new_clustering)[1]
            left = expanded_cluster.left
            right = expanded_cluster.right
            if left.is_cluster():
                heapq.heappush(new_clustering, (-left.distance, left))
            else:
                heapq.heappush(new_clustering, (-(self._min_dist - 1), left))
            if right.is_cluster():
                heapq.heappush(new_clustering, (-right.distance, right))
            else:
                heapq.heappush(new_clustering, (-(self._min_dist - 1), right))
        clusterings.append(new_clustering)
    return clusterings
def nthSuperUglyNumber(self, n, primes):
    """
    :type n: int
    :type primes: List[int]
    :rtype: int
    """
    uglies, idx, heap, ugly_set = [0] * n, [0] * len(primes), [], set([1])
    uglies[0] = 1

    for k, p in enumerate(primes):
        heapq.heappush(heap, (p, k))
        ugly_set.add(p)

    for i in xrange(1, n):
        uglies[i], k = heapq.heappop(heap)
        while (primes[k] * uglies[idx[k]]) in ugly_set:
            idx[k] += 1
        heapq.heappush(heap, (primes[k] * uglies[idx[k]], k))
        ugly_set.add(primes[k] * uglies[idx[k]])

    return uglies[-1]
def nthUglyNumber(self, n):
    """
    :type n: int
    :rtype: int
    """
    l = [1]
    primes = [2, 3, 5]
    heapq.heapify(l)
    set1 = set(l)
    n -= 1
    while n > 0:
        z = heapq.heappop(l)
        n -= 1
        for i in primes:
            if z * i not in set1:
                heapq.heappush(l, z * i)
                set1.add(z * i)
    return heapq.heappop(l)
def get_skyline(LRH):
    """
    Worst Time Complexity: O(NlogN)
    :type buildings: List[List[int]]
    :rtype: List[List[int]]
    """
    skyline, live = [], []
    i, n = 0, len(LRH)
    while i < n or live:
        if not live or i < n and LRH[i][0] <= -live[0][1]:
            x = LRH[i][0]
            while i < n and LRH[i][0] == x:
                heapq.heappush(live, (-LRH[i][2], -LRH[i][1]))
                i += 1
        else:
            x = -live[0][1]
            while live and -live[0][1] <= x:
                heapq.heappop(live)
        height = len(live) and -live[0][0]
        if not skyline or height != skyline[-1][1]:
            skyline += [x, height],
    return skyline
async def _timer(self):
    if not self._queue or not self._waiters:
        return
    ts = heapq.nsmallest(1, self._queue)[0][0]
    t = ts - time.time()
    if t < 0:
        score, f = self._waiters.pop(0)
        ts, val = heapq.heappop(self._queue)
        if score:
            f.set_result((val, ts))
        else:
            f.set_result(val)
    else:
        await asyncio.sleep(t, loop=self.loop)
    self._future = self.loop.create_task(self._timer())
def explore_frontier(sig, data_type, metric, max_size):
    # closure for binding sig and rules easily
    expand = producers.generate_expander(sig)
    # generate appropriate starting node based on type
    start_node = producers.e.Un(producers.parse_type(data_type))
    frontier = [(metric(start_node), start_node)]
    # go hog wild
    while True:
        # pull expression off heap, check expansions
        m, expr = heapq.heappop(frontier)
        expansions = expand(expr)
        # if there are expansions, just add them back in
        if expansions:
            for new_expr in expansions:
                heapq.heappush(frontier, (metric(new_expr), new_expr))
        # if there aren't any expansions and the program is closed, we're done
        elif len(expr._values_from_kind("un")) == 0:
            yield m[0], expr

#------------------------------------------------------------------------------
# now we treat this module as a script - time to execute!
#------------------------------------------------------------------------------
def run(self):
    self._initialize_kernel()

    while len(self._candidate_pieces) > 0:
        _, (position, piece_id), relative_piece = heapq.heappop(self._candidate_pieces)

        if position in self._taken_positions:
            continue

        # If piece is already placed, find new piece candidate and put it back to
        # priority queue
        if piece_id in self._kernel:
            self.add_piece_candidate(relative_piece[0], relative_piece[1], position)
            continue

        self._put_piece_to_kernel(piece_id, position)
def create_beam_items(self, beam):
    beam_items = []
    for b in xrange(len(beam)):
        config = beam[b]
        prevScore = config.score
        dense_feats = self.template.feat_template(config.nodes, config.stack, config.b0)
        pr_scores = self.clf.predict_proba(dense_feats)[0]
        pr_scores = np.log(pr_scores)
        predictions = zip(pr_scores, self.clf.classes_)
        valid_trans = self.get_valid_transitions(config)
        for score, (action, label) in predictions:
            if self.transitions[action] in valid_trans:
                next_transition = valid_trans[self.transitions[action]]
                heapq.heappush(beam_items, (prevScore + score, b, next_transition, label))
                if len(beam_items) > self.beamwidth:
                    heapq.heappop(beam_items)
    return beam_items
def find_k_nearest(self, k, s_query):
    ''' Gets KNN using heap sort '''
    neighbors = []
    for n in self.nodes:
        dist = np.linalg.norm(s_query - n.state)
        if dist > 0:
            hn = HeapNode(n, dist)
            heapq.heappush(neighbors, hn)

    # Check if we have <= k to choose from
    # in which case, simply return that list
    ret_list = []
    if len(neighbors) <= k:
        for k in neighbors:
            ret_list.append(k.node)
        return ret_list

    # get the k nearest neighbors
    for i in xrange(k):
        t = heapq.heappop(neighbors)
        ret_list.append(t.node)
    return ret_list
def __iter__(self):
    if not self.intervals:
        return
    while True:
        e = heappop(self.intervals)
        next_run = e.point[0]
        heappush(self.intervals, e.shift())
        yield next_run, e.action
def accumulate(self, predictions, actuals, num_positives=None):
    """Accumulate the predictions and their ground truth labels.

    After the function call, we may call peek_ap_at_n to actually calculate
    the average precision.
    Note predictions and actuals must have the same shape.

    Args:
      predictions: a list storing the prediction scores.
      actuals: a list storing the ground truth labels. Any value
        larger than 0 will be treated as positives, otherwise as negatives.
      num_positives: If the 'predictions' and 'actuals' inputs aren't complete,
        then it's possible some true positives were missed in them. In that case,
        you can provide 'num_positives' in order to accurately track recall.

    Raises:
      ValueError: An error occurred when the format of the input is not the
        numpy 1-D array or the shape of predictions and actuals does not match.
    """
    if len(predictions) != len(actuals):
        raise ValueError("the shape of predictions and actuals does not match.")

    if not num_positives is None:
        if not isinstance(num_positives, numbers.Number) or num_positives < 0:
            raise ValueError("'num_positives' was provided but it wasn't a nonzero number.")

    if not num_positives is None:
        self._total_positives += num_positives
    else:
        self._total_positives += numpy.size(numpy.where(actuals > 0))
    topk = self._top_n
    heap = self._heap

    for i in range(numpy.size(predictions)):
        if topk is None or len(heap) < topk:
            heapq.heappush(heap, (predictions[i], actuals[i]))
        else:
            if predictions[i] > heap[0][0]:  # heap[0] is the smallest
                heapq.heappop(heap)
                heapq.heappush(heap, (predictions[i], actuals[i]))
def merge_by_key(bam_filenames, key_func, bam_out):
    file_cache = tk_cache.FileHandleCache(mode='rb', open_func=pysam.Samfile)
    total_reads = 0
    heap = []

    for bam_filename in bam_filenames:
        try:
            bam = file_cache.get(bam_filename)
            first_read = bam.next()
            heapq.heappush(heap, (key_func(first_read), first_read, bam_filename))
        except StopIteration:
            pass

    while len(heap) > 0:
        # Get the minimum item and write it to the bam.
        key, read, bam_filename = heapq.heappop(heap)
        bam = file_cache.get(bam_filename)
        bam_out.write(read)
        total_reads += 1

        # Get the next read from the source bam we just wrote from
        # If that BAM file is out of reads, then we leave that one out
        try:
            next_read = bam.next()
            heapq.heappush(heap, (key_func(next_read), next_read, bam_filename))
        except StopIteration:
            pass

    return total_reads
def run(self): """Execute events until the queue is empty. When there is a positive delay until the first event, the delay function is called and the event is left in the queue; otherwise, the event is removed from the queue and executed (its action function is called, passing it the argument). If the delay function returns prematurely, it is simply restarted. It is legal for both the delay function and the action function to to modify the queue or to raise an exception; exceptions are not caught but the scheduler's state remains well-defined so run() may be called again. A questionable hack is added to allow other threads to run: just after an event is executed, a delay of 0 is executed, to avoid monopolizing the CPU when other threads are also runnable. """ # localize variable access to minimize overhead # and to improve thread safety q = self._queue delayfunc = self.delayfunc timefunc = self.timefunc pop = heapq.heappop while q: time, priority, action, argument = checked_event = q[0] now = timefunc() if now < time: delayfunc(time - now) else: event = pop(q) # Verify that the event was not removed or altered # by another thread after we last looked at q[0]. if event is checked_event: action(*argument) delayfunc(0) # Let other threads run else: heapq.heappush(q, event)
def queue(self):
    """An ordered list of upcoming events.

    Events are named tuples with fields for:
        time, priority, action, arguments
    """
    # Use heapq to sort the queue rather than using 'sorted(self._queue)'.
    # With heapq, two events scheduled at the same time will show in
    # the actual order they would be retrieved.
    events = self._queue[:]
    return map(heapq.heappop, [events] * len(events))
def _get(self, heappop=heapq.heappop): return heappop(self.queue)
def _create_binary_tree(self):
    """
    Create a binary Huffman tree using stored vocabulary word counts. Frequent words
    will have shorter binary codes. Called internally from `build_vocab()`.
    """
    vocab_size = len(self.vocab)
    logger.info("constructing a huffman tree from %i words" % vocab_size)

    # build the huffman tree
    heap = list(self.vocab.values())
    heapq.heapify(heap)
    for i in range(vocab_size - 1):
        min1, min2 = heapq.heappop(heap), heapq.heappop(heap)
        heapq.heappush(heap, Vocab(count=min1.count + min2.count, index=i + vocab_size, left=min1, right=min2))

    # recurse over the tree, assigning a binary code to each vocabulary word
    if heap:
        max_depth, stack = 0, [(heap[0], [], [])]
        while stack:
            node, codes, points = stack.pop()
            if node.index < vocab_size:
                # leaf node => store its path from the root
                node.code, node.point = codes, points
                max_depth = max(len(codes), max_depth)
            else:
                # inner node => continue recursion
                points = np.array(list(points) + [node.index - vocab_size], dtype=int)
                stack.append((node.left, np.array(list(codes) + [0], dtype=int), points))
                stack.append((node.right, np.array(list(codes) + [1], dtype=int), points))

        logger.info("built huffman tree with maximum node depth %i" % max_depth)
def add_starts(self, bap):
    syms = []
    for line in bap.syms:
        heappush(syms, int(line, 16))
    for i in range(len(syms)):
        idaapi.add_func(heappop(syms), idaapi.BADADDR)
    idc.Refresh()
    idaapi.refresh_idaview_anyway()
def _expire_timers(self, now):
    while (self._timers_heap and self._timers_heap[0] <= now):
        deadline = heapq.heappop(self._timers_heap)
        callbacks = self._timers.get(deadline)
        while callbacks:
            callbacks.pop()()
        del self._timers[deadline]
    return self._timers_heap[0] if self._timers_heap else 0

# Proton's event model was changed after 0.7
def _results(self):
    with self._condition:
        while self._current < self._exec_count:
            while not self._results_queue or self._results_queue[0][0] != self._current:
                self._condition.wait()
            while self._results_queue and self._results_queue[0][0] == self._current:
                _, res = heappop(self._results_queue)
                try:
                    self._condition.release()
                    if self._fail_fast and not res[0]:
                        self._raise(res[1])
                    yield res
                finally:
                    self._condition.acquire()
                self._current += 1