The following 50 code examples, extracted from open-source Python projects, illustrate how to use tornado.gen.Return().
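For orientation, here is a minimal, self-contained sketch of the pattern these examples share, assuming a Tornado 4.x-style generator coroutine (the divide function and its arguments are illustrative, not taken from any of the projects below). In a @gen.coroutine generator, the result is delivered with raise gen.Return(value), because return value is not allowed inside generators before Python 3.3.

from tornado import gen, ioloop

@gen.coroutine
def divide(a, b):
    # Yield control to the IOLoop once, to simulate an asynchronous step.
    yield gen.moment
    # Generator-based coroutines cannot use "return value" on older Pythons,
    # so the result is delivered by raising gen.Return(value).
    raise gen.Return(a / b)

if __name__ == "__main__":
    # run_sync() drives the coroutine to completion and unwraps the result.
    print(ioloop.IOLoop.current().run_sync(lambda: divide(10, 2)))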
def create_attribute(self, cls, attr_name):
    async_method = self.property.create_attribute(cls, attr_name)
    original_class = self.original_class

    @functools.wraps(async_method)
    @motor_coroutine
    def wrapper(self, *args, **kwargs):
        result = yield async_method(self, *args, **kwargs)

        # Don't call isinstance(), not checking subclasses.
        if result.__class__ == original_class:
            # Delegate to the current object to wrap the result.
            raise gen.Return(self.wrap(result))
        else:
            raise gen.Return(result)

    if self.doc:
        wrapper.__doc__ = self.doc

    return wrapper
def resolve(self, host, port, family):
    """Return list of (family, address) pairs."""
    child_gr = greenlet.getcurrent()
    main = child_gr.parent
    assert main is not None, "Should be on child greenlet"

    def handler(exc_typ, exc_val, exc_tb):
        # If netutil.Resolver is configured to use TwistedResolver.
        if DomainError and issubclass(exc_typ, DomainError):
            exc_typ = socket.gaierror
            exc_val = socket.gaierror(str(exc_val))

        # Depending on the resolver implementation, we could be on any
        # thread or greenlet. Return to the loop's thread and raise the
        # exception on the calling greenlet from there.
        self.io_loop.add_callback(functools.partial(
            child_gr.throw, exc_typ, exc_val, exc_tb))

        return True  # Don't propagate the exception.

    with stack_context.ExceptionStackContext(handler):
        self.resolver.resolve(host, port, family, callback=child_gr.switch)

    return main.switch()
def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None,
            max_buffer_size=None):
    """Connect to the given host and port.

    Asynchronously returns an `.IOStream` (or `.SSLIOStream` if
    ``ssl_options`` is not None).
    """
    addrinfo = yield self.resolver.resolve(host, port, af)
    connector = _Connector(
        addrinfo, self.io_loop,
        functools.partial(self._create_stream, max_buffer_size))
    af, addr, stream = yield connector.start()
    # TODO: For better performance we could cache the (af, addr)
    # information here and re-use it on subsequent connections to
    # the same host. (http://tools.ietf.org/html/rfc6555#section-4.2)
    if ssl_options is not None:
        stream = yield stream.start_tls(False, ssl_options=ssl_options,
                                        server_hostname=host)
    raise gen.Return(stream)
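As a usage note for the connect() coroutine above: a caller typically yields it from another coroutine and hands its own result back with gen.Return as well. A minimal sketch, assuming Tornado 4.x and a reachable host (the fetch_head helper and the example.com address are illustrative, not part of the original project):

from tornado import gen, ioloop, tcpclient

@gen.coroutine
def fetch_head():
    # TCPClient.connect() resolves the host and returns a connected IOStream.
    stream = yield tcpclient.TCPClient().connect("example.com", 80)
    stream.write(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
    # Read the status line and deliver it to the caller via gen.Return.
    status_line = yield stream.read_until(b"\r\n")
    stream.close()
    raise gen.Return(status_line)

if __name__ == "__main__":
    print(ioloop.IOLoop.current().run_sync(fetch_head))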
def test_async_await_mixed_multi_native_yieldpoint(self):
    namespace = exec_test(globals(), locals(), """
    async def f1():
        await gen.Task(self.io_loop.add_callback)
        return 42
    """)

    @gen.coroutine
    def f2():
        yield gen.Task(self.io_loop.add_callback)
        raise gen.Return(43)

    f2(callback=(yield gen.Callback('cb')))
    results = yield [namespace['f1'](), gen.Wait('cb')]
    self.assertEqual(results, [42, 43])
    self.finished = True
def test_swallow_yieldpoint_exception(self):
    # Test exception handling: a coroutine can catch an exception
    # raised by a yield point and not raise a different one.
    @gen.coroutine
    def f1():
        1 / 0

    @gen.coroutine
    def f2():
        try:
            yield f1()
        except ZeroDivisionError:
            raise gen.Return(42)

    result = yield f2()
    self.assertEqual(result, 42)
    self.finished = True
def test_swallow_context_exception(self):
    # Test exception handling: exceptions thrown into the stack context
    # can be caught and ignored.
    @gen.coroutine
    def f2():
        (yield gen.Callback(1))()
        yield gen.Wait(1)
        self.io_loop.add_callback(lambda: 1 / 0)
        try:
            yield gen.Task(self.io_loop.add_timeout,
                           self.io_loop.time() + 10)
        except ZeroDivisionError:
            raise gen.Return(42)

    result = yield f2()
    self.assertEqual(result, 42)
    self.finished = True
def _read_socks5_address(self):
    atyp = yield self.read_bytes(1)
    if atyp == b"\x01":
        data = yield self.read_bytes(4)
        addr = socket.inet_ntoa(data)
    elif atyp == b"\x03":
        length = yield self.read_bytes(1)
        # read_bytes() expects an integer count, so convert the
        # single length byte before reading the domain name.
        addr = yield self.read_bytes(ord(length))
    elif atyp == b"\x04":
        data = yield self.read_bytes(16)
        addr = socket.inet_ntop(socket.AF_INET6, data)
    else:
        raise GeneralProxyError("SOCKS5 proxy server sent invalid data")

    data = yield self.read_bytes(2)
    port = struct.unpack(">H", data)[0]
    raise gen.Return((addr, port))
def _read_stream_body(self, content_length, delegate):
    while 0 < content_length:
        try:
            body = yield self.stream.read_bytes(
                min(self.params.chunk_size, content_length), partial=True)
        except StreamClosedError:
            # with partial stream will update close status after receiving
            # the last chunk, so we catch StreamClosedError instead
            raise gen.Return(False)
        content_length -= len(body)
        if not self._write_finished or self.is_client:
            with _ExceptionLoggingContext(app_log):
                ret = delegate.data_received(body)
                if ret is not None:
                    yield ret
    raise gen.Return(True)
def apparent_encoding(self):
    """The apparent encoding, provided by the chardet library."""
    def _encoding(content):
        return chardet.detect(content)['encoding']

    @gen.coroutine
    def _stream_apparent_encoding():
        content = yield self.content
        raise Return(_encoding(content))

    if not isinstance(self.raw, HTTPMessageDelegate):
        raise TypeError('self.raw must be a trip.adapters.MessageDelegate')

    if self.raw.stream:
        return _stream_apparent_encoding()
    else:
        return _encoding(self.content)
def make_connection(self, endpoint, api_path):
    conn = Connection(endpoint, api_path, self._event_handler,
                      principal=self.principal, secret=self.secret)
    try:
        yield conn.ping()
    except MasterRedirect as ex:  # pragma: no cover
        if ex.location == self.master_info.current_location:
            log.warn('Leading Master not elected yet')
        else:  # pragma: no cover
            log.warn('Master not leading')
            self.master_info.redirected_uri(ex.location)
        conn = None
    except ConnectionRefusedError as ex:  # pragma: no cover
        conn = None
    except Exception:  # pragma: no cover
        conn = None

    raise gen.Return(conn)
def get_instances(matrix_api=None):
    """Fetch the list of monitored instances.

    Originally intended to query the matrix API; for now the instances
    are read from the local alert configuration file instead.
    """
    try:
        # response = yield httpclient.AsyncHTTPClient().fetch(MATRIX_API_GET_INSTANCES)
        # handle matrix response here
        conf = yaml.safe_load(open(ALERT_CONF))
        instances = conf['instances']
    except Exception as e:
        logging.error(e, exc_info=True)
        raise gen.Return([])
    raise gen.Return(instances)
def benchmark():
    client = SimpleAsyncHTTP2Client(
        host=options.h, port=options.p,
        secure=options.s, max_streams=options.c,
        connect_timeout=5, enable_push=False,
        initial_window_size=2**24-1,
    )
    start = time.time()
    futures = []
    for i in range(options.n):
        futures.append(client.fetch('/'))

    yield futures
    end = time.time()
    raise gen.Return(end - start)
def get_db_object_by_attr(object_, **kwargs):
    assert len(kwargs) >= 1, 'function get_db_object_by_attr need argument'
    if 'ignore' in kwargs:
        ignore = kwargs.pop('ignore')
    else:
        ignore = False
    if len(kwargs) > 1:
        filter_ = and_(*[getattr(object_, key) == value
                         for key, value in kwargs.iteritems()])
    else:
        key, value = kwargs.popitem()
        filter_ = getattr(object_, key) == value
    user = yield execute(sqls=[('query', object_),
                               ('filter', filter_),
                               ('first', None)])
    if not user and not ignore:
        raise gen.Return(invalid_argument_error('wrong %s' % key))
    raise gen.Return(user)
def generate_invitingcode():
    random_seed = [
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
        'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p',
        'a', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l',
        'z', 'x', 'c', 'v', 'b', 'n', 'm',
        'P', 'O', 'I', 'U', 'Y', 'T', 'R', 'E', 'W', 'Q',
        'A', 'S', 'D', 'F', 'G', 'H', 'J', 'K', 'L',
        'M', 'N', 'B', 'V', 'C', 'X', 'Z'
    ]
    code = "".join(random.sample(random_seed, random.randint(4, 8)))
    while True:
        temp = yield get_db_object_by_attr(User, code=code, ignore=True)
        if not temp:
            break
        code = "".join(random.sample(random_seed, random.randint(4, 8)))
    raise gen.Return(code)
def put_user(user, nickname=None, headimg=None, sex=None, introduction=None):
    if nickname and nickname != user.nickname:
        if (yield get_db_object_by_attr(User, nickname=nickname, ignore=True)) is not None:
            raise gen.Return(already_exist_error('nickname %s is already existed' % nickname))
        user.nickname = nickname
    if headimg:
        if user.headimg != DEFAULT_HEADIMG:
            result = yield remove_image_from_oss(user.headimg)
            if not result:
                LOG.error('failed to remove image: %s' % user.headimg)
        headimg_path = yield save_image_to_oss(headimg, OSS_HEADIMG_PATH,
                                               str(datetime.now()),
                                               when_fail=DEFAULT_HEADIMG)
        user.headimg = headimg_path
    if sex:
        user.sex = sex
    if introduction:
        user.introduction = introduction
    yield execute(('add', user))
    yield execute(('commit', None))
def save_image_to_oss(file_, storge_path, key, when_fail=None):
    if not allow_image_format(file_["filename"]):
        raise gen.Return(invalid_argument_error(
            'invalid image format: only jpg, jpeg, png is supported'))
    if not allow_image_size(file_):
        raise gen.Return(invalid_argument_error(
            'invalid image size: less than or equal 2M is required'))
    seed = hash_str(os.path.splitext(file_["filename"])[0] + key)
    image_filename = storge_path + seed + os.path.splitext(file_["filename"])[1]
    result = yield CommonTaskFactory. \
        get_task(TaskNames.PROCESS_IMAGE.value). \
        run('upload_file', file_["body"], image_filename)
    if result:
        raise gen.Return(image_filename)
    else:
        raise gen.Return(when_fail)
def get_links_from_url(url):
    """Download the page at `url` and parse it for links.

    Returned links have had the fragment after `#` removed, and have been
    made absolute so, e.g. the URL 'gen.html#tornado.gen.coroutine' becomes
    'http://www.tornadoweb.org/en/stable/gen.html'.
    """
    try:
        response = yield httpclient.AsyncHTTPClient().fetch(url)
        print('fetched %s' % url)

        html = response.body if isinstance(response.body, str) \
            else response.body.decode()
        urls = [urljoin(url, remove_fragment(new_url))
                for new_url in get_links(html)]
    except Exception as e:
        print('Exception: %s %s' % (e, url))
        raise gen.Return([])

    raise gen.Return(urls)
def get_user_list():
    '''Fetch the follower list from the WeChat API.

    Returns a dict keyed by openid, with each user's headimgurl,
    openid and nickname.
    '''
    access_token = yield find_access_token()
    if access_token is None:
        raise Return(False)
    client = AsyncHTTPClient()
    resp = yield client.fetch(tornado_options.get_user_list_url.format(access_token, ''))
    openids = json.loads(resp.body).get('data').get('openid')
    result = dict()
    for openid in openids:
        ret = yield get_user_detail(openid)
        tmp = dict(headimgurl=ret.get('headimgurl'),
                   openid=ret.get('openid'),
                   nickname=ret.get('nickname'))
        result[openid] = tmp
    raise Return(result)
def check_signature(signature, timestamp, nonce, echostr):
    '''Verify that the request really comes from the WeChat server.'''
    # Step 1: sort the token, timestamp and nonce lexicographically.
    mylist = sorted([tornado_options.token, timestamp, nonce])
    # Step 2: concatenate the three strings and take the sha1 digest.
    mystr = ''.join(mylist)
    mystr_encoded = hashlib.sha1(mystr).hexdigest()
    # Step 3: compare the digest with the signature; if they match,
    # echo back echostr, otherwise return None.
    if mystr_encoded == signature:
        raise Return(echostr)
    else:
        raise Return(None)
def unbind(db, openid):
    '''Unbind a WeChat account.

    Parameters
    ----------
    openid : openid of the user to unbind

    Returns
    -------
    True on success, False otherwise.
    '''
    sql = "DELETE FROM dl_user_weixin WHERE openid = '%s'" % openid
    try:
        cursor = yield db.execute(sql)
    except Exception as e:
        BIZLOG.error('ERROR INFO IS : %s' % e.message)
        raise Return(False)
    if cursor.rowcount == 0:
        BIZLOG.error('UNBIND ERROR: [openid: %s]' % (openid, ))
        raise Return(False)
    else:
        BIZLOG.info('UNBIND SUCCESS: [openid: %s]' % (openid, ))
        raise Return(True)
def connect(self):
    """Open the MongoDB connection via Motor."""
    try:
        self.recorder('INFO', '{obj} connect start'.format(obj=self))
        self.set_idle()
        self._client = motor_tornado.MotorClient(**self.setting)
        if self.db:
            self.select_db(self.db)
        self.isConnect = True
        self.recorder('INFO', '{obj} connect successful'.format(obj=self))
    except ConnectionFailure as e:
        self.recorder('ERROR', '{obj} connect failed [{msg}]'.format(obj=self, msg=e))
        self.error()
        raise MongoError
    raise Return(self)
def query(self, command, value=1, check=True, allowable_errors=None, **kwargs):
    """Run a database command.

    Thin wrapper around the pymongo/motor ``Database.command`` interface,
    with logging and timing around the call.
    """
    if not self._db:
        self.recorder('CRITICAL', 'please select db first!')

    shell_command = 'db.runCommand(\n{cmd}\n)'.format(cmd=dumps(command, indent=4, whole=4))
    self.recorder('INFO', '{obj} command start\n{cmd}'.format(obj=self, cmd=shell_command))
    try:
        with tool.timing('s', 10) as t:
            response = yield self._db.command(command=command, value=value,
                                              check=check,
                                              allowable_errors=allowable_errors,
                                              **kwargs)
    except pymongo.errors.PyMongoError as e:
        self.recorder('ERROR', '{obj} command error [{msg}]'.format(obj=self, msg=e))
        raise MongoError
    self.recorder('INFO', '{obj} command successful\n{cmd} -- {time}'.format(
        obj=self, cmd=shell_command, time=t))
    self._response = self._parse_response(response)
    raise Return(self._response)
def communicate():
    # create client
    transport = TTornado.TTornadoStreamTransport('localhost', 9999)
    # open the transport, bail on error
    try:
        yield transport.open()
        print('Transport is opened')
    except TTransport.TTransportException as ex:
        logging.error(ex)
        raise gen.Return()

    protocol = TBinaryProtocol.TBinaryProtocolFactory()
    # pfactory = TMultiplexedProtocol.TMultiplexedProtocol(protocol, 'hello')
    client = HelloService.Client(transport, protocol)

    # ping
    yield client.sayHello()
    print("ping()")

    client._transport.close()
    raise gen.Return()
def communicate():
    # create client
    transport = TTornado.TTornadoStreamTransport('localhost', 7777)
    # open the transport, bail on error
    try:
        yield transport.open()
        print('Transport is opened')
    except TTransport.TTransportException as ex:
        logging.error(ex)
        raise gen.Return()

    protocol = TCompactProtocol.TCompactProtocolFactory()
    # pfactory = TMultiplexedProtocol.TMultiplexedProtocol(protocol, 'hello')
    client = HelloService.Client(transport, protocol)

    # ping
    yield client.sayHello()
    print("ping()")

    client._transport.close()
    raise gen.Return()
def verify_access_service(self, client):
    """
    Verify the token's client / delegate has access to the service
    """
    try:
        service = yield Service.get(self.request.client_id)
    except couch.NotFound:
        raise Unauthorized("Unknown service '{}'"
                           .format(self.request.client_id))

    has_access = client.authorized(self.requested_access, service)
    if not has_access:
        raise Unauthorized("'{}' does not have '{}' to service '{}'"
                           .format(client.id, self.requested_access,
                                   self.request.client_id))

    raise Return(True)
def generate_token(self):
    """Generate a delegate token"""
    self.validate_grant()
    self.validate_scope()

    # Assuming delegation always requires write access
    # should change it to a param
    client = yield Service.get(self.assertion['client']['id'])
    has_access = client.authorized('w', self.request.client)
    if not has_access:
        raise Unauthorized('Client "{}" may not delegate to service "{}"'.format(
            self.assertion['client']['id'],
            self.request.client_id
        ))

    token, expiry = generate_token(client,
                                   self.requested_scope,
                                   self.grant_type,
                                   delegate_id=self.request.client_id)

    raise Return((token, expiry))
def _check_access_resource_ids(self, func, resources):
    """
    Check that each resource identified by an ID exists, then call
    func for that resource
    """
    if not resources:
        raise Return()

    for resource_id in resources:
        try:
            doc = yield views.service_and_repository.first(key=resource_id)
        except couch.NotFound:
            raise InvalidScope('Scope contains an unknown resource ID')

        resource = RESOURCE_TYPES[doc['value']['type']](**doc['value'])

        try:
            yield resource.get_parent()
        except couch.NotFound:
            raise InvalidScope('Invalid resource - missing parent')

        func(resource, resources[resource_id])
def create_iostream_pair(self):
    _lock = Event()
    server_streams = []

    def accept_callback(conn, addr):
        server_stream = MicroProxyIOStream(conn)
        server_streams.append(server_stream)
        # self.addCleanup(server_stream.close)
        _lock.set()

    listener, port = bind_unused_port()
    add_accept_handler(listener, accept_callback)
    client_stream = MicroProxyIOStream(socket.socket())
    yield [client_stream.connect(('127.0.0.1', port)),
           _lock.wait()]
    self.io_loop.remove_handler(listener)
    listener.close()
    raise Return((client_stream, server_streams[0]))
def process_and_return_context(self):
    while not self.finished():
        self.req = None
        self.resp = None
        try:
            yield self.read_request()
            yield self.handle_http_proxy()
            self.send_request()
            yield self.read_response()
            self.send_response()
        except SrcStreamClosedError:
            if self.dest_stream:
                self.dest_stream.close()
            self.context.done = True
            if self.req:
                raise
        except DestStreamClosedError:
            self.src_stream.close()
            raise
        except SwitchToTunnelHttpProxy:
            break

    if self.switch_protocol:
        self.context.scheme = self.req.headers["Upgrade"]

    raise gen.Return(self.context)
def handle_http_proxy(self):
    if self.is_tunnel_http_proxy():
        logger.debug("{0} proxy tunnel to {1}".format(self, self.req.path))
        scheme, host, port = parse_tunnel_proxy_path(self.req.path)
        yield self.connect_to_dest(scheme, (host, port))
        self.src_conn.send_response(HttpResponse(
            code="200", reason="OK", version="HTTP/1.1"))
        raise SwitchToTunnelHttpProxy
    elif self.is_normal_http_proxy():
        logger.debug("{0} proxy to {1}".format(self, self.req.path))
        scheme, host, port, path = parse_proxy_path(self.req.path)
        self.req.path = path
        yield self.connect_to_dest(scheme, (host, port))
        self.dest_conn.io_stream = self.dest_stream
    else:
        raise gen.Return(None)
def process_and_return_context(self):
    self.socks_conn.initiate_connection()
    while True:
        try:
            data = yield self.context.src_stream.read_bytes(1024, partial=True)
        except iostream.StreamClosedError:
            raise SrcStreamClosedError(
                detail="client closed while socks handshaking")

        _event = self.socks_conn.recv(data)
        if _event == "GreetingRequest":
            yield self.handle_greeting_request(_event)
        elif _event == "Request":
            dest_stream, host, port = yield self.handle_request_and_create_destination(_event)
            self.context.dest_stream = dest_stream
            self.context.host = host
            self.context.port = port
            break
        else:
            raise NotImplementedError("not handling with {0}".format(_event))

    raise gen.Return(self.context)
def handle_request_and_create_destination(self, event):
    """Handle the socks request from source
    Create destination connection

    Returns:
        tuple: (dest_stream, host, port)
    """
    if event.cmd != REQ_COMMAND["CONNECT"]:
        logger.debug("Unsupport connect type")
        yield self.send_event_to_src_conn(Response(
            RESP_STATUS["COMMAND_NOT_SUPPORTED"],
            event.atyp, event.addr, event.port), raise_exception=False)
        raise ProtocolError("Unsupport bind type")

    try:
        dest_stream = yield self.create_dest_stream((str(event.addr), event.port))
    except gen.TimeoutError as e:
        yield self.handle_timeout_error(e, event)
    except iostream.StreamClosedError as e:
        yield self.handle_stream_closed_error(e, event)
    else:
        yield self.send_event_to_src_conn(Response(
            RESP_STATUS["SUCCESS"], event.atyp, event.addr, event.port))
        raise gen.Return((dest_stream, event.addr, event.port))
def get(self, query, params, dry_output=False):
    """
    Use this method to fetch data from db.
    param query: (str) actual query to be executed
    param dry_output: (bool) switch output style
    return: If dry_output True - output tuple of tuples, otherwise list of dicts
    """
    # print(datetime.now())
    # print("DB_GET: " + query)
    # print("INPUT: " + str(params))
    with (yield self.pool.Connection()) as conn:
        with conn.cursor() as cursor:
            yield cursor.execute(query, params)
            yield conn.commit()
            data = rows = cursor.fetchall()
            cols = [x[0] for x in cursor.description]

    if not dry_output:
        data = []
        for row in rows:
            record = {}
            for prop, val in zip(cols, row):
                record[prop] = val
            data.append(record)

    raise gen.Return(data)
def readline(self, max_bytes=-1):
    timer = None
    if self._readtimeout:
        timer = Timeout(self._readtimeout)
        timer.start()
    try:
        if max_bytes > 0:
            buff = yield self._iostream.read_until('\n', max_bytes=max_bytes)
        else:
            buff = yield self._iostream.read_until('\n')
        raise Return(buff)
    except TimeoutException:
        self.close()
        raise
    finally:
        if timer:
            timer.cancel()
def init_community_nodes():
    node_list = [
        u'???', u'??', u'??', u'????', u'???', u'??', u'??',
        u'??', u'??', u'??', u'??', u'????', u'??']

    for i, node in enumerate(node_list):
        document = {
            'name': node,
            'sort': i,
            'category': NodeDocument.BUILTIN
        }
        existed = yield NodeDocument.find_one({"name": node})
        if not existed:
            yield NodeDocument.insert(document)

    raise gen.Return()
def add_share_category():
    collection = Connection.get_database(pymongo=True).share_category

    category_list = [
        u'??', u'??', u'??', u'??', u'??', u'??', u'??', u'??']

    for i, category in enumerate(category_list):
        document = {
            'name': category,
            'sort': i
        }
        existed = collection.find_one({"name": category})
        if not existed:
            collection.insert(document)

    raise gen.Return()
def get_chat_message_list(user_id, skip=0, limit=None):
    '''Fetch the chat messages sent or received by the given user, newest first.'''
    user_dbref = DBRef(UserDocument.meta['collection'], ObjectId(user_id))

    query = {
        '$or': [{'sender': user_dbref}, {'recipient': user_dbref}]
    }

    cursor = ChatMessageDocument.find(query).sort(
        [('send_time', pymongo.DESCENDING)]
    ).skip(skip)
    if limit is not None:
        cursor = cursor.limit(limit)

    chat_message_list = yield ChatMessageDocument.to_list(cursor)
    chat_message_list = yield ChatMessageDocument.translate_dbref_in_document_list(
        chat_message_list)

    raise gen.Return(chat_message_list)