我们从 Python 开源项目中提取了以下 26 个代码示例,用于说明如何使用 gevent.GreenletExit()。
def activate(self):
    """Activate this Component.

    If the component has not started yet, start its thread; if it is
    already running but dormant/suspended, signal it to continue.  A
    terminated component is left untouched.  A GreenletExit raised while
    signalling is swallowed (the component is going away anyway).
    """
    if self.is_terminated():
        return
    if not self.active():
        self.start()
        return
    self.trace_locks("act - lock")
    try:
        with self._lock:
            if self.status in (StatusValues.DORMANT, StatusValues.SUSP_FIPE):
                self._can_go.notify()
                self.trace_locks("act - signal")
    except GreenletExit:
        return
    finally:
        self.trace_locks("act - unlock")
def _run(self):
    """Invoke the wrapped callable at most once per ``interval_secs``.

    Loops forever, sleeping until the next scheduled run.  A
    gevent.GreenletExit is the shutdown signal and terminates the loop
    quietly; any other exception raised by the callable is logged and
    swallowed so the periodical keeps running.
    """
    try:
        while True:
            now = time.time()
            if self.last_timestamp + self.interval_secs <= now:
                self.last_timestamp = now
                try:
                    self.f(*self.args, **self.kwargs)
                except gevent.GreenletExit:
                    # We are notified to exit.
                    raise
                except BaseException as e:
                    # Any other failure must not kill the periodical.
                    log.error("Exception %s caught in Periodical %s " % (
                        repr(e), self.name))
            # Sleep until the time for the next run (never negative).
            remaining = self.last_timestamp + self.interval_secs - time.time()
            gevent.sleep(max(remaining, 0))
    except gevent.GreenletExit:
        log.info("Periodical %s stopped." % self.name)
def handle_request(self, *args):
    """Delegate to the base worker, swallowing shutdown signals.

    gevent.GreenletExit and SystemExit are raised to unwind a worker
    that is being stopped; neither is an application error, so both are
    silently ignored.
    """
    try:
        super(GeventWorker, self).handle_request(*args)
    except (gevent.GreenletExit, SystemExit):
        pass
def save_search_result(p, queue, retry=0):
    """Fetch one page of search results through a random proxy and save articles.

    :param p: page number to fetch (interpolated into SEARCH_URL).
    :param queue: work queue; the failing URL is re-queued when we give up.
    :param retry: internal consecutive-failure counter.
    :raises GreenletExit: after more than 5 consecutive failures.
    """
    proxy = Proxy.get_random()['address']
    url = SEARCH_URL.format(SEARCH_TEXT, p)
    try:
        r = fetch(url, proxy=proxy)
    except (Timeout, ConnectionError):
        sleep(0.1)
        retry += 1
        if retry > 5:
            queue.put(url)
            raise GreenletExit()
        # The proxy looks dead: remove it so it is not picked again.
        # (Use a distinct name so the page parameter `p` is not clobbered.)
        try:
            bad_proxy = Proxy.objects.get(address=proxy)
            if bad_proxy:
                bad_proxy.delete()
        except DoesNotExist:
            pass
        # BUG FIX: retry with the original page number, not the URL string.
        # Passing `url` made SEARCH_URL.format() build a bogus URL on retry.
        return save_search_result(p, queue, retry)
    soup = BeautifulSoup(r.text, 'lxml')
    results = soup.find(class_='results')
    if results is None:
        # Likely hit the anti-spider page; back off and retry.
        sleep(0.1)
        retry += 1
        if retry > 5:
            queue.put(url)
            raise GreenletExit()
        # BUG FIX: same as above — retry with the page number, not the URL.
        return save_search_result(p, queue, retry)
    articles = results.find_all(
        'div', lambda x: 'wx-rb' in x)
    for article in articles:
        save_article(article)
def handle_events(self):
    '''
    Gets and dispatches events in an endless loop.

    Runs until the connection drops, a transport/limit error occurs or
    the greenlet is killed, then closes the transport socket, marks the
    connection down and flushes pending commands so no caller stays
    blocked waiting for a reply.
    '''
    self.trace("handle_events started")
    while True:
        try:
            self.get_event()
            gevent.sleep(0)
            if not self.connected:
                self.trace("Not connected !")
                break
        except (LimitExceededError, ConnectError, socket.error, GreenletExit):
            break
        except Exception as ex:
            # Unexpected errors are traced but do not stop the loop.
            self.trace("handle_events error => %s" % str(ex))
    self.trace("handle_events stopped now")
    try:
        self.trace("handle_events socket.close")
        self.transport.sockfd.close()
        self.trace("handle_events socket.close success")
    except Exception as e:
        self.trace("handle_eventssocket.close ERROR: %s" % e)
    self.connected = False
    # prevent any pending request to be stuck
    self._flush_commands()
    return
def _callback_wrapper(self, command_id, command_obj, callback, greenlet):
    """Finalize a finished command greenlet and hand its value to `callback`.

    Starts the next queued command for this service (if any), records the
    final command state (finished / failed / stopped), applies a registered
    result decorator, and finally invokes the callback with the value.
    """
    assert callable(callback) or callback is None
    # Give the next command waiting in this service's queue a chance to run.
    if not self._queue.empty():
        try:
            queued_id = self._queue.get_nowait()
            log.d("Starting command id", queued_id, " next in queue in service '{}'".format(self.name))
            self._start(queued_id)
        except queue.Empty:
            pass
    command_obj.state = command.CommandState.finished
    try:
        # Re-raises whatever the command greenlet raised, if anything.
        greenlet.get()
    except BaseException:
        log.exception("Command", "{}({})".format(command_obj.__class__.__name__, command_id), "raised an exception")
        command_obj.state = command.CommandState.failed
        command_obj.exception = greenlet.exception
        if constants.dev:
            raise  # doesnt work
    # A GreenletExit value means the command was stopped, not finished.
    if isinstance(greenlet.value, gevent.GreenletExit):
        command_obj.state = command.CommandState.stopped
        greenlet.value = None
    # Post-process the result through the command's decorator, if one exists.
    if command_id in self._decorators:
        greenlet.value = self._decorators[command_id](greenlet.value)
    log.d(
        "Command id", command_id, "in service '{}'".format(
            self.name), "has finished running with state:", str(
            command_obj.state))
    if callback:
        callback(greenlet.value)
def save_search_result(page, queue, retry=0): proxy = Proxy.get_random()['address'] url = SEARCH_URL.format(SEARCH_TEXT, page) try: r = fetch(url, proxy=proxy) except (Timeout, ConnectionError, IOError): sleep(0.1) retry += 1 if retry > 5: put_new_page(page, queue) raise GreenletExit() try: p = Proxy.objects.get(address=proxy) if p: p.delete() except DoesNotExist: pass return save_search_result(page, queue, retry) soup = BeautifulSoup(r.text, 'lxml') results = soup.find(class_='results') if results is None: # ???????, ?????? sleep(0.1) retry += 1 if retry > 5: put_new_page(page, queue) print 'retry too much!' raise GreenletExit() return save_search_result(page, queue, retry) articles = results.find_all( 'div', lambda x: 'wx-rb' in x) for article in articles: save_article(article) page_container = soup.find(id='pagebar_container') if page_container and u'???' in page_container.text: last_page = int(page_container.find_all('a')[-2].text) current_page = int(page_container.find('span').text) for page in range(current_page + 1, last_page + 1): put_new_page(page, queue)
def spawn_locusts(self, spawn_count=None, stop_timeout=None, wait=False):
    """Hatch locust users at ``self.hatch_rate`` users per second.

    Defaults to ``self.num_clients`` when no count is given; fires
    hatch_complete once the weighted bucket is drained and optionally
    blocks until every hatched locust has died (``wait=True``).
    """
    if spawn_count is None:
        spawn_count = self.num_clients
    if self.num_requests is not None:
        self.stats.max_requests = self.num_requests
    bucket = self.weight_locusts(spawn_count, stop_timeout)
    spawn_count = len(bucket)
    if self.state in (STATE_INIT, STATE_STOPPED):
        self.state = STATE_HATCHING
        self.num_clients = spawn_count
    else:
        self.num_clients += spawn_count
    logger.info("Hatching and swarming %i clients at the rate %g clients/s..." % (spawn_count, self.hatch_rate))
    hatched_per_class = {cls.__name__: 0 for cls in self.locust_classes}

    def hatch():
        # Space out the spawns so we hit the configured hatch rate.
        pause = 1.0 / self.hatch_rate
        while bucket:
            chosen = bucket.pop(random.randint(0, len(bucket) - 1))
            hatched_per_class[chosen.__name__] += 1

            def start_locust(_):
                try:
                    chosen().run()
                except GreenletExit:
                    pass
            self.locusts.spawn(start_locust, chosen)
            if len(self.locusts) % 10 == 0:
                logger.debug("%i locusts hatched" % len(self.locusts))
            gevent.sleep(pause)
        logger.info("All locusts hatched: %s" % ", ".join(["%s: %d" % (name, count) for name, count in hatched_per_class.items()]))
        events.hatch_complete.fire(user_count=self.num_clients)

    hatch()
    if wait:
        self.locusts.join()
        logger.info("All locusts dead\n")
def run(self, *args, **kwargs):
    """Main TaskSet loop: run on_start once, then execute tasks forever.

    InterruptTaskSet (from on_start or a task) is translated into the
    matching Reschedule* exception for the parent to handle.  StopLocust
    and GreenletExit always propagate.  Any other task error fires the
    locust_error event and either continues the loop (default) or
    re-raises when ``_catch_exceptions`` is disabled.
    """
    self.args = args
    self.kwargs = kwargs
    try:
        if hasattr(self, "on_start"):
            self.on_start()
    except InterruptTaskSet as e:
        if e.reschedule:
            raise RescheduleTaskImmediately(e.reschedule)
        raise RescheduleTask(e.reschedule)
    while True:
        try:
            # Honor the locust-level stop timeout, if one is configured.
            if self.locust.stop_timeout is not None and time() - self._time_start > self.locust.stop_timeout:
                return
            if not self._task_queue:
                self.schedule_task(self.get_next_task())
            try:
                self.execute_next_task()
            except RescheduleTaskImmediately:
                pass
            except RescheduleTask:
                self.wait()
            else:
                self.wait()
        except InterruptTaskSet as e:
            if e.reschedule:
                raise RescheduleTaskImmediately(e.reschedule)
            raise RescheduleTask(e.reschedule)
        except (StopLocust, GreenletExit):
            raise
        except Exception as e:
            events.locust_error.fire(locust_instance=self, exception=e, tb=sys.exc_info()[2])
            if not self.locust._catch_exceptions:
                raise
            sys.stderr.write("\n" + traceback.format_exc())
            self.wait()
def do_sed(message):
    """Apply an s/.../.../-style substitution to recent channel history.

    A 50 ms SIGALRM itimer guards the regex match: a catastrophic
    ("evil") pattern is aborted by throwing GreenletExit into the
    current greenlet and mocking the sender.
    """
    if message.channel not in channels:
        return
    try:
        regex, replacement, flags, target = parse_sed(message.content[1:])
    except ValueError:
        return
    try:
        pattern = re.compile(regex, flags & 127)
    except re.error:
        return
    worker = gevent.getcurrent()

    def abort_match(signum, frame):
        # SIGALRM handler: the match is taking too long — kill it.
        print("timing out!", worker)
        gevent.spawn(message.reply, 'fk off with ur evil regex bro')
        worker.throw(gevent.GreenletExit)

    ### We install a signal handler, to timeout the regular expression match if it's taking too long, i.e. evil regexp
    ### s/^(a+)+$/rip/
    previous_handler = signal.signal(signal.SIGALRM, abort_match)
    signal.setitimer(signal.ITIMER_REAL, 0.05)
    try:
        backlog = channels[message.channel]
        # Walk the history newest-first and rewrite the first match.
        for idx in range(-1, -len(backlog) - 1, -1):
            nick, line = backlog[idx]
            if not pattern.search(line):
                continue
            if target and nick.lower() != target:
                continue
            count = 0 if flags & 0x100 else 1
            rewritten = doTranslation(pattern.sub(replacement, line, count)[:400], flags)
            backlog[idx] = nick, rewritten
            gevent.spawn_later(0, message.reply, '*%s*: %s' % (nick, backlog[idx][1]))
            break
    except re.error:
        return
    finally:
        ### Restore original handlers.
        signal.setitimer(signal.ITIMER_REAL, 0)
        signal.signal(signal.SIGALRM, previous_handler)