我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用types.LambdaType()。
def new_looper(a, arg=None):
    """Helper function for nest(): build the right Looper for *a*.

    Python 2 code (types.TupleType/BooleanType/IntType/... were removed
    in Python 3).

    :param a: loop spec -- a 2/3-tuple (range bounds), bool, int/long,
        str/list, an existing Looper, or a callable.
    :param arg: forwarded to CalcField when *a* is a callable.
    """
    if isinstance(a, types.TupleType):
        # (start, stop) or (start, stop, step) range specification.
        if len(a) == 2:
            return RangeLooper(a[0], a[1])
        elif len(a) == 3:
            return RangeLooper(a[0], a[1], a[2])
    elif isinstance(a, types.BooleanType):
        # bool must be tested before int: bool is an int subclass.
        return BooleanLooper(a)
    elif isinstance(a, types.IntType) or isinstance(a, types.LongType):
        return RangeLooper(a)
    elif isinstance(a, types.StringType) or isinstance(a, types.ListType):
        return ListLooper(a)
    elif isinstance(a, Looper):
        # Already a looper -- pass through unchanged.
        return a
    elif isinstance(a, types.LambdaType):
        # Any function/lambda becomes a computed field.
        return CalcField(a, arg)
def getSubcontroller(self, request, node, model, controllerName):
    """Locate and invoke a controller factory for *controllerName*.

    Looks for 'wcfactory_<name>' first, then the deprecated
    'factory_<name>'.  Python 2 code (uses ``func_code``).
    Old-style single-argument factories are detected by arity
    (lambdas are exempt from the arity check).
    """
    controller = None
    cm = getattr(self, 'wcfactory_' + controllerName, None)
    if cm is None:
        cm = getattr(self, 'factory_' + controllerName, None)
        if cm is not None:
            warnings.warn("factory_ methods are deprecated; please use "
                          "wcfactory_ instead", DeprecationWarning)
    if cm:
        if cm.func_code.co_argcount == 1 and not type(cm) == types.LambdaType:
            warnings.warn("A Controller Factory takes "
                          "(request, node, model) "
                          "now instead of (model)", DeprecationWarning)
            # NOTE(review): 'controllerFactory' is not defined anywhere in
            # this scope -- this looks like it should be cm(model); confirm
            # against upstream before changing.
            controller = controllerFactory(model)
        else:
            controller = cm(request, node, model)
    return controller
def test_build_lambda_for_reduce(self):
    """get_aggregation_lambda() returns a lambda that reduces an RDD
    field-wise: per input_data_structure the tuples are
    (sampling_rate, packet_size, traffic), so the expected result is
    (Max(sampling_rate)=7, Min(packet_size)=0, Sum(traffic)=5)."""
    test_input_rule = "Min(packet_size);Max(sampling_rate); Sum(traffic)"
    input_data_structure = StructType([StructField("sampling_rate", LongType()),
                                       StructField("packet_size", LongType()),
                                       StructField("traffic", LongType())])
    test_input_operation = "reduce"
    config = TestConfig({"processing": {"aggregations": {
        "operation_type": test_input_operation,
        "rule": test_input_rule}}})
    test_aggregation_processor = AggregationProcessor(config, input_data_structure)
    spark = SparkSession.builder.getOrCreate()
    sc = spark.sparkContext
    test_rdd = sc.parallelize([(4, 2, 1), (7, 1, 1), (1, 0, 1), (2, 5, 1),
                               (1, 1, 1)])
    test_aggregation_lambda = test_aggregation_processor.get_aggregation_lambda()
    self.assertIsInstance(test_aggregation_lambda, types.LambdaType,
                          "get_aggregation_lambda should return "
                          "lambda function")
    test_result = test_aggregation_lambda(test_rdd)
    self.assertTupleEqual(test_result, (7, 0, 5),
                          "Error in aggregation operation. Tuple should be equal")
def get_config(self):
    """Serialize the Lambda layer's configuration.

    The wrapped function is dumped as portable bytecode when it is a
    lambda (note: LambdaType matches any plain function too) or stored
    by name otherwise; the output-shape spec additionally allows a raw
    (non-callable) value.  Merged with the base layer config.
    """
    fn = self.function
    if isinstance(fn, python_types.LambdaType):
        function, function_type = func_dump(fn), 'lambda'
    else:
        function, function_type = fn.__name__, 'function'

    shape_spec = self._output_shape
    if isinstance(shape_spec, python_types.LambdaType):
        output_shape, output_shape_type = func_dump(shape_spec), 'lambda'
    elif callable(shape_spec):
        output_shape, output_shape_type = shape_spec.__name__, 'function'
    else:
        output_shape, output_shape_type = shape_spec, 'raw'

    config = {'function': function,
              'function_type': function_type,
              'output_shape': output_shape,
              'output_shape_type': output_shape_type,
              'arguments': self.arguments}
    base_config = super(Lambda, self).get_config()
    merged = list(base_config.items()) + list(config.items())
    return dict(merged)
def obj2dict(obj, processed=None):
    """ converts any object into a dict, recursively

    Python 2 code (references ``long``/``unicode``).  Cycles are broken
    via the *processed* set of id()s and reported as '<reference>'.
    Underscore-prefixed and callable attributes are skipped.

    NOTE(review): a plain dict input takes the ``else`` branch and is
    returned as ``repr(obj)`` rather than being recursed into -- looks
    unintended, but preserved as-is; confirm against callers.
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj, (int, long, str, unicode, float, bool)):
        # scalars pass through unchanged
        return obj
    if id(obj) in processed:
        # already visited -- break the reference cycle
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj, (list, tuple)):
        return [obj2dict(item, processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj, '__dict__'):
        # instance: recurse into its attribute dict
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key, obj2dict(value, processed))
                for key, value in obj.items()
                if not key.startswith('_')
                and not type(value) in (types.FunctionType,
                                        types.LambdaType,
                                        types.BuiltinFunctionType,
                                        types.BuiltinMethodType))
def _compute_fields_for_operation(self, fields, to_compute):
    """Build an OpRow for an insert/update operation.

    :param fields: mapping name -> (field, value); callable values are
        evaluated lazily here so defaults are fresh per operation.
    :param to_compute: iterable of (name, field) whose values derive
        from the other row values via ``field.compute(row)``.
    :raises RuntimeError: when a *required* computed field cannot be
        computed and no explicit value was supplied.
    """
    row = OpRow(self)
    for name, tup in iteritems(fields):
        field, value = tup
        if isinstance(
            value,
            (
                types.LambdaType,
                types.FunctionType,
                types.MethodType,
                types.BuiltinFunctionType,
                types.BuiltinMethodType
            )
        ):
            # callable default: evaluate now, once per operation
            value = value()
        row.set_value(name, value, field)
    for name, field in to_compute:
        try:
            row.set_value(name, field.compute(row), field)
        except (KeyError, AttributeError):
            # error silently unless field is required!
            if field.required and name not in fields:
                raise RuntimeError(
                    'unable to compute required field: %s' % name)
    return row
def _get_name_from_func(func, other): if not isinstance(func, types.LambdaType): with contextlib.suppress(AttributeError): return func.__qualname__ with contextlib.suppress(AttributeError): return func.__name__ return other
def __setstate__(self, state):
    """Sets the state of the _View instance when unpickled.

    *state* is the 3-tuple (database, query-code-object, name_changes).
    The query function is rebuilt from its pickled code object via
    ``types.LambdaType(code, globals, name, argdefs, closure)``.

    NOTE(review): ``sys.modules`` is passed where a globals dict is
    expected, so name lookups resolve to module objects -- presumably
    intentional for how the query code was compiled; confirm.
    """
    database, query, name_changes = state
    self.__database = database
    self.__query = types.LambdaType(query, sys.modules, '', (), ())
    self.__name_changes = name_changes

########################################################################
def getSubview(self, request, node, model, viewName):
    """Get a sub-view from me.

    Looks for 'wvfactory_<name>' first, then the deprecated
    'factory_<name>'.  Python 2 code (uses ``func_code``).

    @returns: L{widgets.Widget}
    """
    view = None
    vm = getattr(self, 'wvfactory_' + viewName, None)
    if vm is None:
        vm = getattr(self, 'factory_' + viewName, None)
        if vm is not None:
            warnings.warn("factory_ methods are deprecated; please use "
                          "wvfactory_ instead", DeprecationWarning)
    if vm:
        # Old-style factories took (self, request, node) -- arity 3.
        # Lambdas are exempt from the arity-based deprecation check.
        if vm.func_code.co_argcount == 3 and not type(vm) == types.LambdaType:
            warnings.warn("wvfactory_ methods take (request, node, "
                          "model) instead of (request, node) now. \n"
                          "Please instantiate your widgets with a "
                          "reference to model instead of self.model",
                          DeprecationWarning)
            # Legacy path: expose the model on self while calling,
            # then restore the main model.
            self.model = model
            view = vm(request, node)
            self.model = self.mainModel
        else:
            view = vm(request, node, model)
    setupMethod = getattr(self, 'wvupdate_' + viewName, None)
    if setupMethod:
        # No factory produced a view: fall back to a bare Widget.
        if view is None:
            view = widgets.Widget(model)
        view.setupMethods.append(setupMethod)
    return view
def __init__(self, lambd):
    """Wrap *lambd*, which must be a function object.

    Note: ``types.LambdaType`` is an alias of ``types.FunctionType``,
    so any plain ``def`` function is accepted, not only lambdas.
    """
    assert isinstance(lambd, types.LambdaType)
    self.lambd = lambd
def test_build_lambda(self):
    """build_lambda() returns a lambda that maps CSV rows to
    (src_ip, renamed dst_ip, packet_size * sampling_rate)."""
    # 'traffic' is computed as mult(packet_size, sampling_rate)
    mult_syntax_tree = SyntaxTree()
    mult_syntax_tree.operation = "mult"
    mult_syntax_tree.children = ["packet_size", "sampling_rate"]
    parsed_transformations = ["src_ip",
                              FieldTransformation("destination_ip", "dst_ip"),
                              FieldTransformation("traffic", mult_syntax_tree)]
    creator = TransformationCreator(self.data_structure, parsed_transformations,
                                    TransformationOperations({
                                        "country": "./GeoLite2-Country.mmdb",
                                        "city": "./GeoLite2-City.mmdb",
                                        "asn": "./GeoLite2-ASN.mmdb"
                                    }))
    transformation = creator.build_lambda()
    self.assertIsInstance(transformation, types.LambdaType,
                          "Transformation type should be lambda")
    spark = SparkSession.builder.getOrCreate()
    file = spark.read.csv(DATA_PATH, self.data_structure_pyspark)
    result = file.rdd.map(transformation)
    result = result.collect()
    self.assertListEqual(result,
                         [("217.69.143.60", "91.221.61.183", 37888),
                          ("91.221.61.168", "90.188.114.141", 34816),
                          ("91.226.13.80", "5.136.78.36", 773120),
                          ("192.168.30.2", "192.168.30.1", 94720),
                          ("192.168.30.2", "192.168.30.1", 94720)],
                         "List of tuples should be equal")
    spark.stop()
def test_build_lambda_with_nested_operations(self):
    """Nested syntax trees compose: traffic =
    mult(mult(packet_size, sampling_rate), 10) -- the expected values
    are 10x those of the flat test."""
    mult_syntax_tree = SyntaxTree()
    mult_syntax_tree.operation = "mult"
    mult_syntax_tree.children = ["packet_size", "sampling_rate"]
    # outer node multiplies the inner product by the literal "10"
    root_mult_st = SyntaxTree()
    root_mult_st.operation = "mult"
    root_mult_st.children = [mult_syntax_tree, "10"]
    parsed_transformations = ["src_ip",
                              FieldTransformation("destination_ip", "dst_ip"),
                              FieldTransformation("traffic", root_mult_st)]
    creator = TransformationCreator(self.data_structure, parsed_transformations,
                                    TransformationOperations({
                                        "country": "./GeoLite2-Country.mmdb",
                                        "city": "./GeoLite2-City.mmdb",
                                        "asn": "./GeoLite2-ASN.mmdb"
                                    }))
    transformation = creator.build_lambda()
    self.assertIsInstance(transformation, types.LambdaType,
                          "Transformation type should be lambda")
    spark = SparkSession.builder.getOrCreate()
    file = spark.read.csv(DATA_PATH, self.data_structure_pyspark)
    result = file.rdd.map(transformation)
    result = result.collect()
    self.assertListEqual(result,
                         [("217.69.143.60", "91.221.61.183", 378880),
                          ("91.221.61.168", "90.188.114.141", 348160),
                          ("91.226.13.80", "5.136.78.36", 7731200),
                          ("192.168.30.2", "192.168.30.1", 947200),
                          ("192.168.30.2", "192.168.30.1", 947200)],
                         "List of tuples should be equal")
    spark.stop()
def test__init__(self):
    """A Processor built from the sample config exposes a lambda
    transformation attribute."""
    processor = Processor(Config(CONFIG_PATH))
    self.assertIsInstance(
        processor.transformation, types.LambdaType,
        "Processor#transformation should be a lambda object")
def test_get_analysis_lambda_for_reduce(self, mock_analysis_record):
    """get_analysis_lambda() returns a lambda; invoking it on a result
    tuple must route the data into the patched analysis_record."""
    # set up input data structure obtained after transformation and aggregation
    input_data_structure = {'rule': [{'key': False, 'func_name': 'Max', 'input_field': 'traffic'},
                                     {'key': False, 'func_name': 'Max', 'input_field': 'ip_size'},
                                     {'key': False, 'func_name': 'Sum', 'input_field': 'ip_size_sum'}],
                            'operation_type': 'reduceByKey'}
    # set up structure of config
    config = TestConfig(
        {
            "historical": {
                "method": "influx",
                "influx_options": {
                    "measurement": "mock"
                }
            },
            "alert": {
                "method": "stdout",
                "option": {}
            },
            "time_delta": 20,
            "accuracy": 3,
            "rule": {
                "ip_size": 5,
                "ip_size_sum": 10,
                "traffic": 15
            }
        })
    detection = Analysis(config.content, Mock(), Mock(), input_data_structure)
    lambda_analysis = detection.get_analysis_lambda()
    self.assertIsInstance(lambda_analysis, types.LambdaType,
                          "Failed. get_analysis_lambda should return a lambda object")
    # exercising the lambda must trigger the patched analysis_record
    lambda_analysis((3, 4, 5, 4))
    self.assertTrue(mock_analysis_record.called,
                    "Failed. The analysis_record didn't call in lambda that returned by get_analysis_lambda.")
def test_get_analysis_lambda_and_analysis_lambda_reduce(self, mock_data_delivery, mock_alert_factory, mock_historical_data):
    """End-to-end (reduce form): the lambda from AnalysisFactory must
    call the mocked analysis module with the mocked historical-data and
    alert-factory objects."""
    input_data_structure = {'rule': [{'key': False, 'func_name': 'Max', 'input_field': 'traffic'},
                                     {'key': False, 'func_name': 'Max', 'input_field': 'ip_size'},
                                     {'key': False, 'func_name': 'Sum', 'input_field': 'ip_size_sum'}],
                            'operation_type': 'reduceByKey'}
    enumerate_output_aggregation_field = {"traffic": 0, "ip_size": 1, "ip_size_sum": 2}
    # fake the dynamically-imported analysis module
    mock_class = MagicMock()
    mock_analysis = MagicMock()
    mock_class.MockAnalysis.return_value = mock_analysis
    mock_analysis.analysis = MagicMock()
    sys.modules['analysis.MockAnalysis'] = mock_class
    # wire the patched collaborators to concrete mock instances
    mock_history_data_singleton = MagicMock()
    mock_data_delivery.return_value = mock_history_data_singleton
    obj_mock_alert_factory = MagicMock()
    mock_alert_factory.return_value = obj_mock_alert_factory
    obj_mock_historical_data = MagicMock()
    mock_historical_data.return_value = obj_mock_historical_data
    analysis_factory = AnalysisFactory(self._config, input_data_structure,
                                       enumerate_output_aggregation_field)
    test_lambda = analysis_factory.get_analysis_lambda()
    self.assertIsInstance(test_lambda, types.LambdaType,
                          "get_analysis_lambda should return lambda function")
    test_lambda((6666, 7777, 8888))
    mock_analysis.analysis.assert_called_with(obj_mock_historical_data,
                                              obj_mock_alert_factory)
def test_get_analysis_lambda_and_analysis_lambda_reduceByKey(self, mock_data_delivery, mock_alert_factory, mock_historical_data, mock_rdd):
    """End-to-end (reduceByKey form): the lambda must iterate the RDD
    via foreachPartition and call the mocked analysis with the mocked
    historical-data and alert-factory objects."""
    input_data_structure = {'rule': [{'key': True, 'func_name': '', 'input_field': 'ip'},
                                     {'key': False, 'func_name': 'Max', 'input_field': 'ip_size'},
                                     {'key': False, 'func_name': 'Sum', 'input_field': 'ip_size_sum'}],
                            'operation_type': 'reduceByKey'}
    enumerate_output_aggregation_field = {"ip_size": 1, "ip_size_sum": 2}

    def mock_foreachPartition(test_lambda):
        # run the per-partition lambda against a tiny in-memory partition
        test_data = [(1, 2, 3)]
        return list(test_lambda(test_data))
    mock_rdd.foreachPartition.side_effect = mock_foreachPartition
    # fake the dynamically-imported analysis module
    mock_class = MagicMock()
    mock_analysis = MagicMock()
    mock_class.MockAnalysis.return_value = mock_analysis
    mock_analysis.analysis = MagicMock()
    sys.modules['analysis.MockAnalysis'] = mock_class
    mock_history_data_singleton = MagicMock()
    mock_data_delivery.return_value = mock_history_data_singleton
    obj_mock_alert_factory = MagicMock()
    mock_alert_factory.return_value = obj_mock_alert_factory
    obj_mock_historical_data = MagicMock()
    mock_historical_data.return_value = obj_mock_historical_data
    analysis_factory = AnalysisFactory(self._config, input_data_structure,
                                       enumerate_output_aggregation_field)
    test_lambda = analysis_factory.get_analysis_lambda()
    self.assertIsInstance(test_lambda, types.LambdaType,
                          "get_analysis_lambda should return lambda function")
    test_lambda(mock_rdd)
    self.assertTrue(mock_rdd.foreachPartition.called,
                    "Failed. The foreachPartition didn't call in lambda that returned by get_analysis_lambda.")
    mock_analysis.analysis.assert_called_with(obj_mock_historical_data,
                                              obj_mock_alert_factory)
def get_config(self):
    """Serialize the merge layer's configuration.

    mode, _output_shape and _output_mask each serialize as one of:
    bytecode + 'lambda' (any function; LambdaType matches plain
    functions too), name + 'function' (other callables), or the raw
    value + 'raw'.
    """
    def encode(value):
        # Same three-way encoding for every configurable slot.
        if isinstance(value, python_types.LambdaType):
            return func_dump(value), 'lambda'
        if callable(value):
            return value.__name__, 'function'
        return value, 'raw'

    mode, mode_type = encode(self.mode)
    output_shape, output_shape_type = encode(self._output_shape)
    output_mask, output_mask_type = encode(self._output_mask)
    return {'name': self.name,
            'mode': mode,
            'mode_type': mode_type,
            'concat_axis': self.concat_axis,
            'dot_axes': self.dot_axes,
            'output_shape': output_shape,
            'output_shape_type': output_shape_type,
            'output_mask': output_mask,
            'output_mask_type': output_mask_type,
            'arguments': self.arguments}
def _extract_type(typ): if isinstance(typ, LambdaType): return typ() else: return typ
def random_or_specified_value(cls, value):
    """
    Helper utility for choosing between a user-specified value for a
    field or a randomly generated value.

    :param value: either a zero-argument function/lambda (random
        generator) or a plain value.
    :return: the plain value unchanged, or the result of calling the
        generator.
    """
    if isinstance(value, types.LambdaType):
        return value()
    return value
def __init__(self, schema: LambdaType) -> None: if callable(schema) and isinstance(schema, LambdaType): self.schema = schema else: raise UndefinedSchema()
def get_argspec(ob):
    """Get a string describing the arguments for the given object.

    For classes the constructor is described (via the project helper
    _find_constructor); bound methods are unwrapped and their 'self'
    stripped from the rendered spec.  The first line(s) of the
    docstring (truncated around 70 chars) are appended.
    """
    argspec = ""
    if ob is not None:
        if isinstance(ob, type):
            fob = _find_constructor(ob)
            if fob is None:
                # class without a usable constructor: show "()"
                fob = lambda: None
        elif isinstance(ob, types.MethodType):
            fob = ob.__func__
        else:
            fob = ob
        if isinstance(fob, (types.FunctionType, types.LambdaType)):
            # NOTE(review): inspect.formatargspec was deprecated in 3.5
            # and removed in 3.11 -- inspect.signature is the modern
            # replacement; confirm target Python version.
            argspec = inspect.formatargspec(*inspect.getfullargspec(fob))
            pat = re.compile('self\,?\s*')
            argspec = pat.sub("", argspec)
        doc = getattr(ob, "__doc__", "")
        if doc:
            doc = doc.lstrip()
            pos = doc.find("\n")
            if pos < 0 or pos > 70:
                pos = 70
            if argspec:
                argspec += "\n"
            argspec += doc[:pos]
    return argspec

#################################################
#
# Test code
#
def __init__(self, lambd): assert type(lambd) is types.LambdaType self.lambd = lambd
def getmethparlist(ob):
    """Get strings describing the arguments for the given object.

    Python 2 code (im_func, func_code, func_defaults; relies on map()
    returning a list).  Returns (calltext, calltext-without-defaults),
    both parenthesised, or two empty strings on any failure.
    """
    argText1 = argText2 = ""
    # bit of a hack for methods - turn it into a function
    # but we drop the "self" param.
    if type(ob) == types.MethodType:
        fob = ob.im_func
        argOffset = 1
    else:
        fob = ob
        argOffset = 0
    # Try and build one for Python defined functions
    if type(fob) in [types.FunctionType, types.LambdaType]:
        try:
            counter = fob.func_code.co_argcount
            items2 = list(fob.func_code.co_varnames[argOffset:counter])
            realArgs = fob.func_code.co_varnames[argOffset:counter]
            defaults = fob.func_defaults or []
            # render each default as "=<repr>" and left-pad with ""
            defaults = list(map(lambda name: "=%s" % repr(name), defaults))
            defaults = [""] * (len(realArgs) - len(defaults)) + defaults
            items1 = map(lambda arg, dflt: arg + dflt, realArgs, defaults)
            # 0x4 == CO_VARARGS: function takes *args
            if fob.func_code.co_flags & 0x4:
                items1.append("*" + fob.func_code.co_varnames[counter])
                items2.append("*" + fob.func_code.co_varnames[counter])
                counter += 1
            # 0x8 == CO_VARKEYWORDS: function takes **kwargs
            if fob.func_code.co_flags & 0x8:
                items1.append("**" + fob.func_code.co_varnames[counter])
                items2.append("**" + fob.func_code.co_varnames[counter])
            argText1 = ", ".join(items1)
            argText1 = "(%s)" % argText1
            argText2 = ", ".join(items2)
            argText2 = "(%s)" % argText2
        except:
            pass
    return argText1, argText2
def get_config(self):
    """Serialize the Lambda layer: a lambda is dumped as portable
    bytecode ('lambda'), a named function is stored by name
    ('function'); merged with the base layer config."""
    fn = self.function
    if isinstance(fn, python_types.LambdaType):
        function, function_type = func_dump(fn), 'lambda'
    else:
        function, function_type = fn.__name__, 'function'
    config = {'function': function,
              'function_type': function_type,
              'arguments': self.arguments}
    base_config = super(Lambda, self).get_config()
    merged = list(base_config.items()) + list(config.items())
    return dict(merged)
def isFunType(obj):
    """Return True when *obj* is a method or a plain function/lambda.

    (types.LambdaType is an alias of types.FunctionType, so any
    ``def`` function qualifies, not only lambdas.  Original docstring
    was mojibake; rewritten in English.)
    """
    return isinstance(obj, (types.MethodType, types.LambdaType))
def _getFuncResult(func,args): '''????????????????????????? ''' if not isinstance(func,types.MethodType) and not isinstance(func,types.LambdaType): raise TypeError("func type %s is not a MethodType or LambdaType" % type(func)) if dict == type(args): actret = func(**args) elif tuple == type(args): actret = func(*args) else: actret = func(args) return actret
def _waitForCompareResult(func, args, timeout=10, interval=0.5):
    '''Poll until the result of *func* matches, or *timeout* expires.

    (Original docstring was mojibake; rewritten in English.)

    :param func: callable producing the actual value
    :param args: arguments for *func*, unpacked by _getFuncResult
        (dict -> keywords, tuple -> positionals, other -> single arg)
    :param compareobj: the expected value, OR a predicate callable that
        receives the actual result and must return True on success
    :param timeout: overall time budget, seconds
    :param interval: pause between attempts, seconds
    :return: tuple (matched, try_count, actual, expected)
    '''
    start = time.time()
    waited = 0.0
    try_count = 0
    while True:
        try_count += 1
        actret = _getFuncResult(func, args)
        if isinstance(compareobj, types.MethodType) or isinstance(compareobj, types.LambdaType):
            # predicate form: compareobj(actual) must return exactly True
            expret = _getFuncResult(compareobj, actret)
            if expret == True:
                return True, try_count, actret, expret
        else:
            # plain expected-value comparison
            expret = compareobj
            if actret == expret:
                return True, try_count, actret, expret
        waited = time.time() - start
        if waited < timeout:
            # sleep, but never past the overall deadline
            time.sleep(min(interval, timeout - waited))
        else:
            return False, try_count, actret, expret