The following code examples, extracted from open-source Python projects, illustrate how to use types.GeneratorType.
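Before the project excerpts, here is a minimal, self-contained sketch of the pattern they all share (the function name count_up is purely illustrative): calling a function whose body contains yield does not run the body; it returns a generator object, and isinstance(obj, types.GeneratorType) is the standard way to detect one.

import types

def count_up(n):
    # Calling this function does not execute the loop; it builds a generator.
    for i in range(n):
        yield i

gen = count_up(3)
assert isinstance(gen, types.GeneratorType)            # the generator object matches
assert not isinstance(count_up, types.GeneratorType)   # the function itself does not
assert list(gen) == [0, 1, 2]                          # consuming the generator runs the body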
def inlineCallbacks(f, *args, **kwargs):
    # ...
    try:
        gen = f(*args, **kwargs)
    except defer._DefGen_Return:
        raise TypeError(
            "inlineCallbacks requires %r to produce a generator; instead "
            "caught returnValue being used in a non-generator" % (f,))
    if not isinstance(gen, types.GeneratorType):
        raise TypeError(
            "inlineCallbacks requires %r to produce a generator; "
            "instead got %r" % (f, gen))
    return defer._inlineCallbacks(None, gen, defer.Deferred())
# ...
# ...
# ...
def isgenerator(object):
    """Return true if the object is a generator.

    Generator objects provide these attributes:
        __iter__        defined to support iteration over container
        close           raises a new GeneratorExit exception inside the
                        generator to terminate the iteration
        gi_code         code object
        gi_frame        frame object or possibly None once the generator has
                        been exhausted
        gi_running      set to 1 when generator is executing, 0 otherwise
        next            return the next item from the container
        send            resumes the generator and "sends" a value that becomes
                        the result of the current yield-expression
        throw           used to raise an exception inside the generator"""
    return isinstance(object, types.GeneratorType)
def to_chunks(stream_or_generator):
    """This generator function receives file-like or generator as input
    and returns generator.

    :param file|__generator[bytes] stream_or_generator: readable stream
        or generator.
    :rtype: __generator[bytes]
    :raise: TypeError
    """
    if isinstance(stream_or_generator, types.GeneratorType):
        yield from stream_or_generator
    elif hasattr(stream_or_generator, 'read'):
        while True:
            chunk = stream_or_generator.read(CHUNK_SIZE)
            if not chunk:
                break  # no more data
            yield chunk
    else:
        raise TypeError('Input must be either readable or generator.')
def test_iter_quarters():
    start = timezone.make_aware(datetime(2015, 11, 30, 1, 2, 3))
    end = timezone.make_aware(datetime(2017, 2, 28, 11, 22, 33))
    quarters = iter_quarters(start, end)
    assert type(quarters) is types.GeneratorType
    starts = [
        datetime.combine(datetime(year, month, day).date(), start.timetz())
        for year, month, day in [
            (2015, 11, 30),
            (2016, 2, 29),  # leap!
            (2016, 5, 30),
            (2016, 8, 30),
            (2016, 11, 30),
            (2017, 2, 28),
        ]
    ]
    ends = starts[1:] + [end]
    assert list(quarters) == list(zip(starts, ends))
def test_iter_years():
    start = timezone.make_aware(datetime(2016, 2, 29, 1, 2, 3))
    end = timezone.make_aware(datetime(2019, 2, 28, 11, 22, 33))
    years = iter_years(start, end)
    assert type(years) is types.GeneratorType
    starts = [
        datetime.combine(datetime(year, month, day).date(), start.timetz())
        for year, month, day in [
            (2016, 2, 29),  # leap!
            (2017, 2, 28),
            (2018, 2, 28),
            (2019, 2, 28),
        ]
    ]
    ends = starts[1:] + [end]
    assert list(years) == list(zip(starts, ends))
def once_only(func):
    called_funcs = {}

    @wraps(func)
    def wrapper(*args, **kwgs):
        if func.__name__ not in called_funcs:
            result = obj = func(*args, **kwgs)
            if isinstance(obj, types.GeneratorType):
                def gi_wrapper():
                    while True:
                        result = obj.next()  # Python 2 generator protocol; next(obj) in Python 3
                        called_funcs[func.__name__] = result
                        yield result
                return gi_wrapper()
            else:
                called_funcs[func.__name__] = result
                return result
        else:
            return called_funcs[func.__name__]
    return wrapper
def isgenerator(object):
    """Return true if the object is a generator.

    Generator objects provide these attributes:
        __iter__        defined to support iteration over container
        close           raises a new GeneratorExit exception inside the
                        generator to terminate the iteration
        gi_code         code object
        gi_frame        frame object or possibly None once the generator has
                        been exhausted
        gi_running      set to 1 when generator is executing, 0 otherwise
        next            return the next item from the container
        send            resumes the generator and "sends" a value that becomes
                        the result of the current yield-expression
        throw           used to raise an exception inside the generator"""
    return isinstance(object, types.GeneratorType)
def test_list_organizations():
    responses.add(responses.GET,
                  "https://app.threatstack.com/api/v1/organizations",
                  content_type="application/json",
                  body='[ \
                      {"role": "user", "id": "acbd18db4cc2f85cedef654fccc4a4d8", "name": "Foo\'s Organization"}, \
                      {"role": "user", "id": "37b51d194a7513e45b56f6524f2d51f2", "name": "Bar\'s Organization"} \
                      ]')

    ts = ThreatStack(api_key="test_api_key", api_version=1)
    response = ts.organizations.list()

    assert isinstance(response, GeneratorType)
    count = 0
    for org in response:
        count += 1
    assert count == 2
def test__batch_generator(self, dataset_provider, mocker):
    mocker.patch.object(dataset_provider, '_preprocess_batch',
                        lambda x, _: x)

    datum_list = range(10)
    generator = dataset_provider._batch_generator(datum_list)
    results = [next(generator) for _ in range(4)]
    assert [len(x) for x in results] == [4, 4, 2, 4]
    assert sorted(sum(results[:-1], [])) == datum_list

    datum_list = range(12)
    generator = dataset_provider._batch_generator(datum_list)
    assert isinstance(generator, GeneratorType)
    results = list(islice(generator, 4))
    assert [len(x) for x in results] == [4, 4, 4, 4]
    assert sorted(sum(results[:-1], [])) == datum_list
def test_run_generator(self, mCreateSession):
    iSession = MockSession()
    mCreateSession.return_value = (iSession, '123456')
    client = iSession.client('stepfunctions')

    def target(input_):
        yield
        yield
        return

    # Just make sure the target is actually a generator
    self.assertEqual(type(target(None)), types.GeneratorType)

    task = TaskMixin(process = target)
    task.handle_task('token', None)

    self.assertEqual(task.token, None)

    call = mock.call.send_task_success(taskToken = 'token', output = 'null')
    call_ = mock.call.send_task_heartbeat(taskToken = 'token')
    calls = [call_, call_, call]
    self.assertEqual(client.mock_calls, calls)
def memoized(func):
    '''
    A function decorator to make a function cache its return values.
    If a function returns a generator, it's transformed into a list and
    cached that way.
    '''
    cache = {}

    def wrapper(*args):
        if args in cache:
            return cache[args]
        val = func(*args)
        if isinstance(val, types.GeneratorType):
            val = list(val)
        cache[args] = val
        return val
    wrapper.__doc__ = func.__doc__
    wrapper.func_name = '%s_memoized' % func.func_name
    return wrapper
def __call__( self, *args, **kwds ):
    self.debugLogThreading( 'ThreadSwitchScheduler(%d:%s): start %r( %r, %r )' %
                            (self.instance_id, self.reason, self.function, args, kwds) )
    #pylint disable=bare-except
    try:
        # call the function
        result = self.function( *args, **kwds )

        # did the function run or make a generator?
        if type(result) != types.GeneratorType:
            self.debugLogThreading( 'ThreadSwitchScheduler(%d:%s): done (not GeneratorType)' %
                                    (self.instance_id, self.reason) )
            # it ran - we are all done
            return

        # step the generator
        self.queueNextSwitch( result )

    except:
        self.app.log.exception( 'ThreadSwitchScheduler(%d:%s)' %
                                (self.instance_id, self.reason) )
def content_types_analyzer(node, source, validated):
    """Return for example
    {'content_types': {'artist': 8610,
                       'person': 3,
                       'cinema_review': 769,
                       'venue': 729,
                       'cultural_event': 2487,
                       'organization': 1,
                       'review': 4187}}
    Only content types with a non-zero count are included.
    """
    if 'metadata_filter' in validated:
        validated['metadata_filter'].pop('content_types', None)

    objects = source(**validated)
    index = find_catalog('system')['content_type']
    intersection = index.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        object_ids = index.family.IF.Set(object_ids)

    result = [(content_type, len(intersection(object_ids, oids)))
              for content_type, oids in index._fwd_index.items()]
    result = dict([(k, v) for k, v in result if v != 0])
    return {'content_types': result}
def states_analyzer(node, source, validated):
    """Return for example
    {'states': {'editable': 250,
                'published': 16264,
                'archived': 269,
                'active': 3}}
    """
    if 'metadata_filter' in validated:
        validated['metadata_filter'].pop('states', None)

    objects = source(**validated)
    index = find_catalog('dace')['object_states']
    intersection = index.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        object_ids = index.family.IF.Set(object_ids)

    result = [(state_id, len(intersection(object_ids, oids)))
              for state_id, oids in index._fwd_index.items()]
    result = dict([(k, v) for k, v in result if v != 0])
    return {'states': result}
def leaves(self, value):
    if value is None:
        raise ValueError('Leaves should be a list.')
    elif not isinstance(value, list) and \
            not isinstance(value, types.GeneratorType):
        raise ValueError('Leaves should be a list or a generator (%s).'
                         % type(value))

    if self.prehashed:
        # it will create a copy of list or
        # it will create a new list based on the generator
        self._leaves = list(value)
    else:
        self._leaves = [ShardManager.hash(leaf) for leaf in value]

    if not len(self._leaves) > 0:
        raise ValueError('Leaves must contain at least one entry.')

    for leaf in self._leaves:
        if not isinstance(leaf, six.string_types):
            raise ValueError('Leaves should only contain strings.')
def test_make_tiles_tile_bounds(x, y):
    '''
    Test if children tiles from z10 are created correctly
    '''
    test_bounds = mercantile.bounds(x, y, 10)

    test_bbox = list(mercantile.xy(test_bounds.west, test_bounds.south)) + \
        list(mercantile.xy(test_bounds.east, test_bounds.north))

    test_crs = 'epsg:3857'
    test_minz = 10
    test_maxz = 13

    created_tiles_gen = _make_tiles(test_bbox, test_crs, test_minz, test_maxz)

    assert isinstance(created_tiles_gen, types.GeneratorType)

    created_tiles = list(created_tiles_gen)

    assert len(created_tiles) == 85
def test_batch():
    """Test the batch feed dict generator."""
    X = np.arange(100)
    fd = {'X': X}

    data = ab.batch(fd, batch_size=10, n_iter=10)

    # Make sure this is a generator
    assert isinstance(data, GeneratorType)

    # Make sure we get a dict back of a length we expect
    d = next(data)
    assert isinstance(d, dict)
    assert 'X' in d
    assert len(d['X']) == 10

    # Test we get all of X back in one sweep of the data
    accum = list(d['X'])
    for ds in data:
        assert len(ds['X']) == 10
        accum.extend(list(ds['X']))

    assert len(accum) == len(X)
    assert set(X) == set(accum)
def test_batch_predict():
    """Test the batch prediction feed dict generator."""
    X = np.arange(100)
    fd = {'X': X}

    data = ab.batch_prediction(fd, batch_size=10)

    # Make sure this is a generator
    assert isinstance(data, GeneratorType)

    # Make sure we get a dict back of a length we expect with correct indices
    for ind, d in data:
        assert isinstance(d, dict)
        assert 'X' in d
        assert len(d['X']) == 10
        assert all(X[ind] == d['X'])
def test_should_return_generator(self):
    responses.add(
        self.method,
        self.url,
        json={
            'links': {},
            'items': []
        }
    )

    assert_that(
        iterate_by_pagination(
            method=self.method,
            request_kwargs=self.request_kwargs,
            requests_session=self.requests_session,
            request_defaults=self.request_defaults
        ),
        instance_of(types.GeneratorType)
    )
def __call__(self, stream):
    """Run the pipeline

    Return a static (non generator) version of the result
    """
    # Run the stream over all the filters on the pipeline
    for filter in self:
        # Functions and callable objects (objects with '__call__' method)
        if isinstance(filter, collections.Callable):
            stream = list(filter(stream))
        # Normal filters (objects with 'process' method)
        else:
            stream = filter.process(None, stream)

    # If the last filter returned a generator, staticalize it inside a list
    if isinstance(stream, GeneratorType):
        return list(stream)
    return stream
def getMimeType(self, contenttype, format_types, result=None):
    supported_types = ["text/plain", "text/html", "application/yaml", "application/json"]
    CONTENT_TYPES = {
        "text/plain": str,
        "text/html": self._text2htmlSerializer,
        "application/yaml": self._resultyamlSerializer,
        "application/json": j.db.serializers.getSerializerType('j').dumps
    }
    if not contenttype:
        serializer = format_types["text"]["serializer"]
        return CONTENT_TYPE_HTML, serializer
    elif isinstance(result, types.GeneratorType):
        return 'application/octet-stream', lambda x: x
    else:
        mimeType = mimeparse.best_match(supported_types, contenttype)
        serializer = CONTENT_TYPES[mimeType]
        return mimeType, serializer
def doAudit(user, path, kwargs, responsetime, statuscode, result, tags):
    client = getClient('system')
    audit = client.audit.new()
    audit.user = user
    audit.call = path
    audit.statuscode = statuscode
    audit.tags = tags
    audit.args = json.dumps([])  # we don't want to log self
    auditkwargs = kwargs.copy()
    auditkwargs.pop('ctx', None)
    audit.kwargs = json.dumps(auditkwargs)
    try:
        if not isinstance(result, types.GeneratorType):
            audit.result = json.dumps(result)
        else:
            audit.result = json.dumps('Result of type generator')
    except:
        audit.result = json.dumps('binary data')
    audit.responsetime = responsetime
    client.audit.set(audit)
def process_result(results):
    """"""
    if isinstance(results, types.GeneratorType):
        for result in results:
            #pprint('')
            if result['result']:
                pprint('Success! %s' % result['from'])
                os.system('echo %s >> success.txt' % json.dumps(result['proxy']))
            else:
                pprint('Failed! %s' % result['from'])
                os.system('echo %s >> failed.txt' % result['from'])
    else:
        result = results
        if result['result']:
            pprint('Success! %s' % result['from'])
            os.system('echo %s >> success.txt' % json.dumps(result['proxy']))
        else:
            pprint('Failed! %s' % result['from'])
            os.system('echo %s >> failed.txt' % result['from'])

#----------------------------------------------------------------------
def compute(self, name, raise_exceptions=False):
    """
    Compute a node and all necessary predecessors

    Following a successful computation, the target node and all
    necessary ancestors that were not already UPTODATE will have been
    calculated and set to UPTODATE. Any node that did not need to be
    calculated will not have been recalculated.

    If any node raises an exception, then the state of that node will
    be set to ERROR, and its value set to an object containing the
    exception object, as well as a traceback. This will not halt the
    computation, which will proceed as far as it can, until no more
    nodes that would be required to calculate the target are COMPUTABLE.

    :param name: Name of the node to compute
    :param raise_exceptions: Whether to pass exceptions raised by node
        computations back to the caller
    :type raise_exceptions: Boolean, default False
    """
    if isinstance(name, (types.GeneratorType, list)):
        calc_nodes = set()
        for name0 in name:
            for n in self._get_calc_nodes(name0):
                calc_nodes.add(n)
    else:
        calc_nodes = self._get_calc_nodes(name)
    self._compute_nodes(calc_nodes, raise_exceptions=raise_exceptions)
def __init__(self, generator):
    """
    Parameters
    ----------
    generator : function
        the function (generator) to be used
    """
    super(ExecutionPlan, self).__init__()

    if not isinstance(generator, types.GeneratorType):
        generator = generator()

    assert isinstance(generator, types.GeneratorType)

    self._generator = generator
    self._running = True
    self._finish_conditions = []
def ipwrap(value, query = ''):
    try:
        if isinstance(value, (list, tuple, types.GeneratorType)):
            _ret = []
            for element in value:
                if ipaddr(element, query, version = False, alias = 'ipwrap'):
                    _ret.append(ipaddr(element, 'wrap'))
                else:
                    _ret.append(element)
            return _ret
        else:
            _ret = ipaddr(value, query, version = False, alias = 'ipwrap')
            if _ret:
                return ipaddr(_ret, 'wrap')
            else:
                return value
    except:
        return value
def calculate(self, data, starting_month=1):
    '''
    First dimension of data should be time (months)
    '''
    # Check if distribution has been fit on historical data
    if self.dist_type is None:
        print("You must fit a distribution first")
        return False

    if isinstance(data, types.GeneratorType):
        # NOTE: the generator branch falls through, so spi is only
        # bound when data is not a generator
        pass
    else:
        spi = self.calculate_over_full_series(data, starting_month)
    return spi
def deregister_response(fn):
    """
    Deregister response from the registry. It's a decorator.
    """
    @wraps(fn)
    def inner(self, *args, **kwargs):
        item_or_request = fn(self, *args, **kwargs)
        if isinstance(item_or_request, types.GeneratorType):
            item_or_request = get_consistent_generator(item_or_request)

        # Only decrease counter if the item_or_request passed the filter
        if item_or_request:
            response = self._get_response(args, kwargs)
            self._decrease_counter(response)
        return item_or_request
    return inner
def visit(self, node):
    stack = [ Visit(node) ]
    last_result = None
    while stack:
        try:
            last = stack[-1]
            if isinstance(last, types.GeneratorType):
                stack.append(last.send(last_result))
                last_result = None
            elif isinstance(last, Visit):
                stack.append(self._visit(stack.pop().node))
            else:
                last_result = stack.pop()
        except StopIteration:
            stack.pop()
    return last_result
def visit(self, node):
    stack = [ node ]
    last_result = None
    while stack:
        try:
            last = stack[-1]
            if isinstance(last, types.GeneratorType):
                stack.append(last.send(last_result))
                last_result = None
            elif isinstance(last, Node):
                stack.append(self._visit(stack.pop()))
            else:
                last_result = stack.pop()
        except StopIteration:
            stack.pop()
    return last_result
def get_type(obj):
    """Return the static type that would be used in a type hint"""
    if isinstance(obj, type):
        return Type[obj]
    elif isinstance(obj, _BUILTIN_CALLABLE_TYPES):
        return Callable
    elif isinstance(obj, types.GeneratorType):
        return Iterator[Any]
    typ = type(obj)
    if typ is list:
        elem_type = shrink_types(get_type(e) for e in obj)
        return List[elem_type]
    elif typ is set:
        elem_type = shrink_types(get_type(e) for e in obj)
        return Set[elem_type]
    elif typ is dict:
        key_type = shrink_types(get_type(k) for k in obj.keys())
        val_type = shrink_types(get_type(v) for v in obj.values())
        return Dict[key_type, val_type]
    elif typ is tuple:
        if not obj:
            return Tuple
        return Tuple[tuple(get_type(e) for e in obj)]
    return typ
def add(self, *objs):
    """
    Add a sequence of polynomials or containers of polynomials.

    Example
    -------

    >>> from sympy.polys.rings import ring
    >>> from sympy.polys.domains import ZZ

    >>> R, x = ring("x", ZZ)
    >>> R.add([ x**2 + 2*i + 3 for i in range(4) ])
    4*x**2 + 24
    >>> _.factor_list()
    (4, [(x**2 + 6, 1)])

    """
    p = self.zero

    for obj in objs:
        if is_sequence(obj, include=GeneratorType):
            p += self.add(*obj)
        else:
            p += obj

    return p
def mul(self, *objs):
    """
    Multiply a sequence of polynomials or containers of polynomials.

    Example
    -------

    >>> from sympy.polys.rings import ring
    >>> from sympy.polys.domains import ZZ

    >>> R, x = ring("x", ZZ)
    >>> R.mul([ x**2 + 2*i + 3 for i in range(4) ])
    x**8 + 24*x**6 + 206*x**4 + 744*x**2 + 945
    >>> _.factor_list()
    (1, [(x**2 + 3, 1), (x**2 + 5, 1), (x**2 + 7, 1), (x**2 + 9, 1)])

    """
    p = self.one

    for obj in objs:
        if is_sequence(obj, include=GeneratorType):
            p *= self.mul(*obj)
        else:
            p *= obj

    return p
def run(self):
    threadless.log.debug("threadlet: %s: tasklet(%s): running", self.threadlet.name, self.name)
    self.running = True
    try:
        value = self.func(self)
        if isinstance(value, types.GeneratorType):
            value = yield from value
        threadless.log.debug("threadlet: %s: tasklet(%s): done", self.threadlet.name, self.name)
    except asyncio.CancelledError:
        threadless.log.warn("threadlet: %s: tasklet(%s): cancelled", self.threadlet.name, self.name)
    except Exception:
        threadless.log.exception("threadlet: %s: tasklet(%s): exception", self.threadlet.name, self.name)
    del self.running
    if self.suspended or self.cancelled:
        return
    if self.period and self not in self.threadlet.timeouts:
        self.schedule(self.period)
def work(self, queues, **kwargs):
    # take the command queue and data queue from the queues mapping
    self.command_queue = command_queue = queues['command_queue']
    self.data_queue = data_queue = queues['data_queue']
    while True:
        # block until a command arrives
        cmd = command_queue.get()
        # if a command was received, run the check
        if cmd:
            raw_datas = self.user_check(cmd)
            if isinstance(raw_datas, types.GeneratorType):
                for raw_data in raw_datas:
                    # pass each result on to the handler
                    data_queue.put(raw_data)
            else:
                # pass the single result on to the handler
                data_queue.put(raw_datas)
        else:
            log.error('\nNo command received')
        kwargs['record'].thread_signal[kwargs['name']] = time.time()
def content(self):
    content = self._fixed_content
    if not content and self.parent:
        content = self.parent._find_content(self._name)
    if isinstance(content, DOMElement) or content:
        if isinstance(content, DOMElement):
            yield content
        elif isinstance(content, (list, tuple, GeneratorType)):
            yield from content
        elif isinstance(content, dict):
            yield content
        elif isinstance(content, str):
            yield content
        else:
            yield from iter([content, ])
    else:
        return
def __init__(self, name, obj):
    super().__init__()
    if not name and issubclass(obj.__class__, DOMElement):
        name = obj._name
    self.name = name
    self.obj = obj
    if isinstance(obj, GeneratorType):
        self._iterable = True
    elif issubclass(obj.__class__, DOMElement) or isinstance(obj, str):
        self._iterable = False
    else:
        try:
            _ = (e for e in obj)
            self._iterable = True
        except TypeError:
            self._iterable = False
def yield_domgroups(items, kwitems, reverse=False):
    """Flattens the given items/kwitems.

    Yields (index, DOMGroup) pairs after flattening.
    The "reverse" parameter inverts the flattened yielding.
    """
    verse = (1, -1)[reverse]
    if isinstance(items, GeneratorType):
        items = list(items)
    unnamed = (DOMGroup(None, item) for item in items[::verse])
    named = (DOMGroup(k, v) for k, v in list(kwitems.items())[::verse])
    contents = (unnamed, named)[::verse]
    for i, group in enumerate(chain(*contents)):
        if isinstance(group.obj, DOMElement):
            # If the DOMGroup is a single DOMElement and we have a name,
            # set its name accordingly
            group.obj._name = group.name
        yield i, group
def do(func, func_args, func_kargs, Monad):
    @handle_monadic_throws(Monad)
    def run_maybe_iterator():
        itr = func(*func_args, **func_kargs)
        if isinstance(itr, types.GeneratorType):
            @handle_monadic_throws(Monad)
            def send(*vals):
                try:
                    # here's the real magic
                    monad = itr.send(*vals)
                    return monad.bind(send)
                except StopIteration:
                    return Monad.unit(None)
            return send(None)
        else:
            # not really a generator
            if itr is None:
                return Monad.unit(None)
            else:
                return itr
    run_maybe_iterator.__name__ = func.__name__
    return run_maybe_iterator()
def _find_generators(self, item):
    """A recursive function to flatten generators into lists"""
    try:
        result = []
        # Make sure dicts aren't flattened to lists
        if isinstance(item, dict):
            result = {}
            for i in item:
                result[self._find_generators(i)] = self._find_generators(item[i])
            return result

        # Since NoneObjects and strings are both iterable, treat them specially
        if isinstance(item, obj.NoneObject) or isinstance(item, str):
            return item
        if isinstance(item, types.GeneratorType):
            raise CacheContainsGenerator
        for x in iter(item):
            flat_x = self._find_generators(x)
            result.append(flat_x)

        return result
    except TypeError:
        return item
def test_sort_school(self):
    students = [
        (3, ("Kyle",)),
        (4, ("Christopher", "Jennifer",)),
        (6, ("Kareem",))
    ]
    for grade, students_in_grade in students:
        for student in students_in_grade:
            self.school.add(student, grade)

    result = self.school.sort()

    # Attempts to catch false positives
    self.assertTrue(isinstance(result, Sequence) or
                    isinstance(result, GeneratorType) or
                    callable(getattr(result, '__reversed__', False)))

    result_list = list(result.items() if hasattr(result, "items") else result)

    self.assertEqual(result_list, students)
def test_iter_files_simple():
    gen = utils.iter_files(TEST_DIR, ['py'])
    assert isinstance(gen, types.GeneratorType)
    assert len(list(gen)) == 1
def new_instance(type, frum, schema=None):
    """
    Factory!
    """
    if not type2container:
        _delayed_imports()

    if isinstance(frum, Container):
        return frum
    elif isinstance(frum, _Cube):
        return frum
    elif isinstance(frum, _Query):
        return _run(frum)
    elif isinstance(frum, (list, set, GeneratorType)):
        return _ListContainer(frum)
    elif isinstance(frum, basestring):
        # USE DEFAULT STORAGE TO FIND Container
        if not config.default.settings:
            Log.error("expecting jx_python.query.config.default.settings to contain default elasticsearch connection info")

        settings = set_default(
            {
                "index": join_field(split_field(frum)[:1:]),
                "name": frum,
            },
            config.default.settings
        )
        settings.type = None  # WE DO NOT WANT TO INFLUENCE THE TYPE BECAUSE NONE IS IN THE frum STRING ANYWAY
        return type2container["elasticsearch"](settings)
    elif isinstance(frum, Mapping):
        frum = wrap(frum)
        if frum.type and type2container[frum.type]:
            return type2container[frum.type](frum.settings)
        elif frum["from"]:
            frum = copy(frum)
            frum["from"] = Container(frum["from"])
            return _Query.wrap(frum)
        else:
            Log.error("Do not know how to handle {{frum|json}}", frum=frum)
    else:
        Log.error("Do not know how to handle {{type}}", type=frum.__class__.__name__)
def expand(item):
    if isinstance(item, types.GeneratorType):
        return list(item)
    if isinstance(item, dict):
        return ['{0}={1}'.format(k, v) for k, v in item.items()]
    return item