We have extracted the following 34 code examples from open-source Python projects to illustrate how to use collections.Container().
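As a quick orientation before the extracted examples, here is a minimal sketch (not drawn from any of the projects below; the Box class is made up for illustration) of what the Container ABC actually checks: a class counts as a Container as soon as it defines __contains__, either structurally via the ABC's subclass hook or through explicit registration. Note that the ABC now lives in collections.abc; importing it directly from collections works only in Python 2 and in Python 3 releases before 3.10.

# Minimal sketch (illustrative, not from the projects below).
try:
    from collections.abc import Container   # Python 3.3+
except ImportError:
    from collections import Container       # Python 2

print(isinstance([1, 2, 3], Container))      # True  -- list defines __contains__
print(isinstance(42, Container))             # False -- int does not

class Box(object):
    def __contains__(self, item):            # defining __contains__ is enough for the
        return item == 'secret'              # ABC's structural __subclasshook__ check

print(isinstance(Box(), Container))          # True, without subclassing or registering
print('secret' in Box())                     # True
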
def test_Container(self):
    non_samples = [None, 42, 3.14, 1j,
                   (lambda: (yield))(),
                   (x for x in []),
                   ]
    for x in non_samples:
        self.assertNotIsInstance(x, Container)
        self.assertFalse(issubclass(type(x), Container), repr(type(x)))
    samples = [bytes(), str(),
               tuple(), list(), set(), frozenset(), dict(),
               dict().keys(), dict().items(),
               ]
    for x in samples:
        self.assertIsInstance(x, Container)
        self.assertTrue(issubclass(type(x), Container), repr(type(x)))
    self.validate_abstract_methods(Container, '__contains__')
    self.validate_isinstance(Container, '__contains__')

def test_Container(self):
    non_samples = [None, 42, 3.14, 1j,
                   (lambda: (yield))(),
                   (x for x in []),
                   ]
    for x in non_samples:
        self.assertNotIsInstance(x, Container)
        self.assertFalse(issubclass(type(x), Container), repr(type(x)))
    samples = [str(),
               tuple(), list(), set(), frozenset(), dict(),
               dict().keys(), dict().items(),
               ]
    for x in samples:
        self.assertIsInstance(x, Container)
        self.assertTrue(issubclass(type(x), Container), repr(type(x)))
    self.validate_abstract_methods(Container, '__contains__')
    self.validate_isinstance(Container, '__contains__')

def test_abc_registry(self):
    d = dict(a=1)

    self.assertIsInstance(d.viewkeys(), collections.KeysView)
    self.assertIsInstance(d.viewkeys(), collections.MappingView)
    self.assertIsInstance(d.viewkeys(), collections.Set)
    self.assertIsInstance(d.viewkeys(), collections.Sized)
    self.assertIsInstance(d.viewkeys(), collections.Iterable)
    self.assertIsInstance(d.viewkeys(), collections.Container)

    self.assertIsInstance(d.viewvalues(), collections.ValuesView)
    self.assertIsInstance(d.viewvalues(), collections.MappingView)
    self.assertIsInstance(d.viewvalues(), collections.Sized)

    self.assertIsInstance(d.viewitems(), collections.ItemsView)
    self.assertIsInstance(d.viewitems(), collections.MappingView)
    self.assertIsInstance(d.viewitems(), collections.Set)
    self.assertIsInstance(d.viewitems(), collections.Sized)
    self.assertIsInstance(d.viewitems(), collections.Iterable)
    self.assertIsInstance(d.viewitems(), collections.Container)

def _listlike_guard(obj, name, iterable_only=False):
    """
    We frequently require passed objects to support iteration or containment
    expressions, but not be strings. (Of course, strings support iteration
    and containment, but not usefully.)
    If the passed object is a string, we'll wrap it in a tuple and return it.
    If it's already an iterable, we'll return it as-is.
    Otherwise, we'll raise a ValueError.
    """
    required_type = (_Iterable,) if iterable_only else (_Container, _Iterable)
    required_type_name = ' or '.join(t.__name__ for t in required_type)
    if not isinstance(obj, required_type):
        raise ValueError('{} must be of type {}'.format(name, required_type_name))
    # at this point it is definitely the right type, but might be a string
    if isinstance(obj, basestring):
        logging.warning('{} passed as a string; should be list-like'.format(name))
        return (obj,)
    return obj

def isdisjoint(self, other):
    r"""Return True if the set has no elements in common with other.

    Sets are disjoint iff their intersection is the empty set.

    >>> ms = Multiset('aab')
    >>> ms.isdisjoint('bc')
    False
    >>> ms.isdisjoint(Multiset('ccd'))
    True

    Args:
        other: The other set to check disjointedness. Can also be an
            :class:`~typing.Iterable`\[~T] or :class:`~typing.Mapping`\[~T, :class:`int`]
            which are then converted to :class:`Multiset`\[~T].
    """
    if isinstance(other, _sequence_types + (BaseMultiset, )):
        pass
    elif not isinstance(other, Container):
        other = self._as_multiset(other)
    return all(element not in other for element in self._elements.keys())

def deep_getsizeof(o, ids):
    """Find the memory footprint of a Python object.

    This is a recursive function that drills down a Python object graph
    like a dictionary holding nested dictionaries with lists of lists
    and tuples and sets.

    The sys.getsizeof function only does a shallow size. It counts each
    object inside a container as a pointer only, regardless of how big it
    really is.

    :param o: the object
    :param ids: set of ids of objects already counted
    :return: the deep size in bytes
    """
    d = deep_getsizeof
    if id(o) in ids:
        return 0

    r = getsizeof(o)
    ids.add(id(o))

    if isinstance(o, str) or isinstance(o, unicode):
        return r

    if isinstance(o, Mapping):
        return r + sum(d(k, ids) + d(v, ids) for k, v in o.iteritems())

    if isinstance(o, Container):
        return r + sum(d(x, ids) for x in o)

    return r

def non_string_collection(x):
    '''
    A simple helper to allow string types to be distinguished from
    other collection types.
    '''
    if isinstance(x, Container):
        if not isinstance(x, (str, bytes)):
            return True
    return False

def iscol(x):
    '''
    Allow distinguishing between string types and "true" containers
    '''
    if isinstance(x, Container):
        if not isinstance(x, (str, bytes)):
            return True
    return False


##########################
# Higher order functions #
##########################

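The two helpers above implement the same idiom: treat an object as a collection only if it is a Container but not a string. Below is a small, self-contained usage sketch; it restates the helper under an assumed Python 3 import purely so the snippet runs on its own.

# Usage sketch of the string-vs-container idiom (standalone copy, assuming Python 3).
from collections.abc import Container

def non_string_collection(x):
    return isinstance(x, Container) and not isinstance(x, (str, bytes))

print(non_string_collection([1, 2, 3]))   # True  -- a list is a non-string container
print(non_string_collection({'a': 1}))    # True  -- so is a dict
print(non_string_collection('abc'))       # False -- strings are deliberately excluded
print(non_string_collection(42))          # False -- ints are not containers at all
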
def test_direct_subclassing(self):
    for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
        class C(B):
            pass
        self.assertTrue(issubclass(C, B))
        self.assertFalse(issubclass(int, C))

def test_registration(self):
    for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
        class C:
            __hash__ = None  # Make sure it isn't hashable by default
        self.assertFalse(issubclass(C, B), B.__name__)
        B.register(C)
        self.assertTrue(issubclass(C, B))

def test_registration(self):
    for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
        class C:
            __metaclass__ = type
            __hash__ = None  # Make sure it isn't hashable by default
        self.assertFalse(issubclass(C, B), B.__name__)
        B.register(C)
        self.assertTrue(issubclass(C, B))

def test_multiple_inheritance(self):
    """
    Issue #96 (for newdict instead of newobject)
    """
    import collections

    class Base(dict):
        pass

    class Foo(Base, collections.Container):
        def __contains__(self, item):
            return False

def test_multiple_inheritance(self):
    """
    Issue #96 (for newstr instead of newobject)
    """
    import collections

    class Base(str):
        pass

    class Foo(Base, collections.Container):
        def __contains__(self, item):
            return False

def test_multiple_inheritance(self):
    """
    Issue #96 (for newint instead of newobject)
    """
    import collections

    class Base(int):
        pass

    class Foo(Base, collections.Container):
        def __add__(self, other):
            return 0

def test_multiple_inheritance(self):
    """
    Issue #96 (for newlist instead of newobject)
    """
    import collections

    class Base(list):
        pass

    class Foo(Base, collections.Container):
        def __contains__(self, item):
            return False

def test_multiple_inheritance(self):
    """
    Issue #96
    """
    import collections

    class Base(object):
        pass

    class Foo(Base, collections.Container):
        def __contains__(self, item):
            return False

def __instancecheck__(self, instance):
    return (isinstance(instance, collections.Iterable)
            and isinstance(instance, collections.Sized)
            and isinstance(instance, collections.Container)
            and all(isinstance(x, self._type) for x in instance))

def _condition(self, container):
    return all([
        isinstance(container, Container),
        isinstance(container, Iterable),
        not isinstance(container, six.string_types),
        not isinstance(container, Mapping)
    ])

def deep_getsizeof(obj):
    """Find the memory footprint of a Python object.

    Based on code from code.tutsplus.com: http://goo.gl/fZ0DXK

    This is a recursive function that drills down a Python object graph
    like a dictionary holding nested dictionaries with lists of lists and
    tuples and sets.

    The sys.getsizeof function only does a shallow size. It counts each
    object inside a container as a pointer only, regardless of how big it
    really is.
    """
    ids = set()

    def size(o):
        if id(o) in ids:
            return 0

        r = sys.getsizeof(o)
        ids.add(id(o))

        if isinstance(o, (str, bytes, bytearray, array.array)):
            return r

        if isinstance(o, Mapping):
            return r + sum(size(k) + size(v) for k, v in o.items())

        if isinstance(o, Container):
            return r + sum(size(x) for x in o)

        return r

    return size(obj)

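A short usage sketch for the deep_getsizeof above; it assumes that definition and its imports (sys, array, and the Mapping/Container ABCs) are already in scope, and the nested dict is just illustrative data.

# Usage sketch (assumes the deep_getsizeof definition above is in scope).
import sys

nested = {'a': [1, 2, 3], 'b': {'c': 'hello'}}
print(sys.getsizeof(nested))     # shallow size: only the outer dict object
print(deep_getsizeof(nested))    # deep size: also counts keys, the list, the inner dict, ...
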
def __init__(self, returns, *args, **kwargs):
    if isinstance(returns, collections.Container):
        all = type(returns)(as_op(ret) for ret in returns)
    elif isinstance(returns, Op):
        all = [as_op(returns)]
    elif returns is not None:
        raise ValueError()
    else:
        all = []

    self.values = all
    self.returns = returns
    super(ComputationOp, self).__init__(all=all, **kwargs)

    def is_input(arg):
        return arg.tensor.is_input

    placeholders = self.placeholders()
    if len(args) == 1 and args[0] == 'all':
        args = placeholders

    args = tuple(as_op(arg) for arg in args)
    arg_tensors = set(arg.tensor for arg in args)
    missing_tensors = [t for t in placeholders - arg_tensors]
    if len(missing_tensors) > 0:
        raise ValueError(("All used placeholders must be supplied to a "
                          "computation. Currently missed {}."
                          ).format(missing_tensors))

    for arg in args:
        if not (arg.tensor.is_input):
            raise ValueError((
                'The arguments to a computation must all be Ops with property '
                'is_input=True, but the op passed had is_input=False. '
                'In most cases you want to pass placeholder ops in as arguments. '
                '{op} was passed in, of type {op_type}.'
            ).format(
                op=arg,
                op_type=arg.__class__.__name__,
            ))

    self.parameters = args
    for arg in args:
        self.add_control_dep(arg)

def process_tree(value, key=None, parent_key=None):
    def _process_leaf(value, key=None, parent_key=None):
        if key == 'description' and parent_key != 'properties':
            return DescriptionContent(value.strip())
        return value

    def _enforce_strict_types(dictionary):
        if dictionary.get('type') == 'object':
            dictionary.setdefault('additionalProperties', False)
        elif dictionary.get('type') == 'string':
            dictionary.setdefault('minLength', 1)
        elif dictionary.get('type') == 'array':
            dictionary.setdefault('uniqueItems', True)
            dictionary.setdefault('minItems', 1)
        return dictionary

    def _ensure_values_have_types(properties, parent_key):
        for key, val in properties.iteritems():
            if not val.get('type') and not val.get('$ref'):
                warnings.warn(
                    u'"{}" field of "{}" does not have a type'.format(
                        key, parent_key
                    )
                )

    def _is_leaf(value):
        return (not isinstance(value, collections.Container)
                or isinstance(value, basestring))

    if _is_leaf(value):
        return _process_leaf(value, key, parent_key)
    elif isinstance(value, list):
        return [process_tree(v) for v in value]
    elif isinstance(value, dict):
        value = _enforce_strict_types(value)
        if key == 'properties':
            _ensure_values_have_types(value, parent_key)
        return {k: process_tree(v, k, key) for k, v in value.iteritems()}
    else:
        raise TypeError(u"'{}' has unexpected type: {}".format(
            value, type(value).__name__))

def hash_params(params):
    """
    Construct a data structure of parameters that is hashable.

    This requires changing any mutable data structures into immutable ones.
    We chose a frozenset because role parameters have to be unique.

    .. warning:: this does not handle unhashable scalars. Two things
        mitigate that limitation:

        1) There shouldn't be any unhashable scalars specified in the yaml
        2) Our only choice would be to return an error anyway.
    """
    # Any container is unhashable if it contains unhashable items (for
    # instance, tuple() is a Hashable subclass but if it contains a dict, it
    # cannot be hashed)
    if isinstance(params, collections.Container) and not isinstance(params, (text_type, binary_type)):
        if isinstance(params, collections.Mapping):
            try:
                # Optimistically hope the contents are all hashable
                new_params = frozenset(params.items())
            except TypeError:
                new_params = set()
                for k, v in params.items():
                    # Hash each entry individually
                    new_params.update((k, hash_params(v)))
                new_params = frozenset(new_params)
        elif isinstance(params, (collections.Set, collections.Sequence)):
            try:
                # Optimistically hope the contents are all hashable
                new_params = frozenset(params)
            except TypeError:
                new_params = set()
                for v in params:
                    # Hash each entry individually
                    new_params.update(hash_params(v))
                new_params = frozenset(new_params)
        else:
            # This is just a guess.
            new_params = frozenset(params)
        return new_params

    # Note: We do not handle unhashable scalars but our only choice would be
    # to raise an error there anyway.
    return frozenset((params,))

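A short usage sketch for hash_params, assuming the definition above and its imports are in scope; the params dict is illustrative.

# Usage sketch (assumes the hash_params definition above is in scope).
params = {'port': 8080, 'tags': ['web', 'prod']}
key = hash_params(params)              # a hashable frozenset, usable as a dict/set key
seen = {key: 'role already applied'}   # the raw dict itself could not be used this way
print(hash_params(params) == key)      # True -- the result is deterministic for equal input
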