Example source code using Python's LambdaType() class
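In CPython, types.LambdaType is simply an alias for types.FunctionType, so an isinstance(x, types.LambdaType) check matches any plain Python function, not only ones created with lambda, and calling it constructs a function object from a code object. The snippets collected below use it in both ways. A minimal, self-contained sketch of the alias:

import types

square = lambda x: x * x

def cube(x):
    return x ** 3

# LambdaType is the same object as FunctionType, so both callables match.
print(types.LambdaType is types.FunctionType)   # True
print(isinstance(square, types.LambdaType))     # True
print(isinstance(cube, types.LambdaType))       # True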

recipe-473818.py (project: code, author: ActiveState)
def new_looper(a, arg=None):
    """Helper function for nest()
    determines what sort of looper to make given a's type"""
    if isinstance(a,types.TupleType):
        if len(a) == 2:
            return RangeLooper(a[0],a[1])
        elif len(a) == 3:
            return RangeLooper(a[0],a[1],a[2])
    elif isinstance(a, types.BooleanType):
        return BooleanLooper(a)
    elif isinstance(a,types.IntType) or isinstance(a, types.LongType):
        return RangeLooper(a)
    elif isinstance(a, types.StringType) or isinstance(a, types.ListType):
        return ListLooper(a)
    elif isinstance(a, Looper):
        return a
    elif isinstance(a, types.LambdaType):
        return CalcField(a, arg)
controller.py (project: hostapd-mana, author: adde88)
def getSubcontroller(self, request, node, model, controllerName):
        controller = None
        cm = getattr(self, 'wcfactory_' +
                                    controllerName, None)
        if cm is None:
            cm = getattr(self, 'factory_' +
                                         controllerName, None)
            if cm is not None:
                warnings.warn("factory_ methods are deprecated; please use "
                              "wcfactory_ instead", DeprecationWarning)
        if cm:
            if cm.func_code.co_argcount == 1 and not type(cm) == types.LambdaType:
                warnings.warn("A Controller Factory takes "
                              "(request, node, model) "
                              "now instead of (model)", DeprecationWarning)
                controller = controllerFactory(model)
            else:
                controller = cm(request, node, model)
        return controller
test_aggregationProcessor.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test_build_lambda_for_reduce(self):
        test_input_rule = "Min(packet_size);Max(sampling_rate); Sum(traffic)"
        input_data_structure = StructType([StructField("sampling_rate", LongType()),
                                           StructField("packet_size", LongType()),
                                           StructField("traffic", LongType())])
        test_input_operation = "reduce"
        config = TestConfig({"processing": {"aggregations": {"operation_type": test_input_operation,
                                                             "rule": test_input_rule}}})
        test_aggregation_processor = AggregationProcessor(config, input_data_structure)

        spark = SparkSession.builder.getOrCreate()
        sc = spark.sparkContext
        test_rdd = sc.parallelize([(4, 2, 1), (7, 1, 1), (1, 0, 1), (2, 5, 1), (1, 1, 1)])
        test_aggregation_lambda = test_aggregation_processor.get_aggregation_lambda()

        self.assertIsInstance(test_aggregation_lambda, types.LambdaType, "get_aggregation_lambda should return "
                                                                         "lambda function")

        test_result = test_aggregation_lambda(test_rdd)
        self.assertTupleEqual(test_result, (7, 0, 5), "Error in aggregation operation. Tuple should be equal")
core.py (project: keras, author: GeekLiB)
def get_config(self):
        if isinstance(self.function, python_types.LambdaType):
            function = func_dump(self.function)
            function_type = 'lambda'
        else:
            function = self.function.__name__
            function_type = 'function'

        if isinstance(self._output_shape, python_types.LambdaType):
            output_shape = func_dump(self._output_shape)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        config = {'function': function,
                  'function_type': function_type,
                  'output_shape': output_shape,
                  'output_shape_type': output_shape_type,
                  'arguments': self.arguments}
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
saml2_auth.py (project: touch-pay-client, author: HackPucBemobi)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
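The obj2dict helpers on this page are Python 2 code (note long and unicode). Callable attributes are dropped because their types appear in the exclusion tuple, and so are names with a leading underscore. A short usage sketch under that assumption, with a hypothetical Settings class:

class Settings(object):
    def __init__(self):
        self.name = 'demo'
        self.retries = 3
        self.on_error = lambda exc: None  # dropped: functions/lambdas are excluded
        self._secret = 'hidden'           # dropped: leading underscore

print(obj2dict(Settings()))
# expected output: {'name': 'demo', 'retries': 3}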
objects.py (project: touch-pay-client, author: HackPucBemobi)
def _compute_fields_for_operation(self, fields, to_compute):
        row = OpRow(self)
        for name, tup in iteritems(fields):
            field, value = tup
            if isinstance(
                value, (
                    types.LambdaType, types.FunctionType, types.MethodType,
                    types.BuiltinFunctionType, types.BuiltinMethodType
                )
            ):
                value = value()
            row.set_value(name, value, field)
        for name, field in to_compute:
            try:
                row.set_value(name, field.compute(row), field)
            except (KeyError, AttributeError):
                # error silently unless field is required!
                if field.required and name not in fields:
                    raise RuntimeError(
                        'unable to compute required field: %s' % name)
        return row
saml2_auth.py (project: true_review_web2py, author: lucadealfaro)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
saml2_auth.py (project: spc, author: whbrewer)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
core.py (project: deep-learning-keras-projects, author: jasmeetsb)
def get_config(self):
        if isinstance(self.function, python_types.LambdaType):
            function = func_dump(self.function)
            function_type = 'lambda'
        else:
            function = self.function.__name__
            function_type = 'function'

        if isinstance(self._output_shape, python_types.LambdaType):
            output_shape = func_dump(self._output_shape)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        config = {'function': function,
                  'function_type': function_type,
                  'output_shape': output_shape,
                  'output_shape_type': output_shape_type,
                  'arguments': self.arguments}
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
saml2_auth.py (project: Problematica-public, author: TechMaz)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
controller.py (project: sslstrip-hsts-openwrt, author: adde88)
def getSubcontroller(self, request, node, model, controllerName):
        controller = None
        cm = getattr(self, 'wcfactory_' +
                                    controllerName, None)
        if cm is None:
            cm = getattr(self, 'factory_' +
                                         controllerName, None)
            if cm is not None:
                warnings.warn("factory_ methods are deprecated; please use "
                              "wcfactory_ instead", DeprecationWarning)
        if cm:
            if cm.func_code.co_argcount == 1 and not type(cm) == types.LambdaType:
                warnings.warn("A Controller Factory takes "
                              "(request, node, model) "
                              "now instead of (model)", DeprecationWarning)
                controller = controllerFactory(model)
            else:
                controller = cm(request, node, model)
        return controller
core.py (project: keras-customized, author: ambrite)
def get_config(self):
        if isinstance(self.function, python_types.LambdaType):
            function = func_dump(self.function)
            function_type = 'lambda'
        else:
            function = self.function.__name__
            function_type = 'function'

        if isinstance(self._output_shape, python_types.LambdaType):
            output_shape = func_dump(self._output_shape)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        config = {'function': function,
                  'function_type': function_type,
                  'output_shape': output_shape,
                  'output_shape_type': output_shape_type,
                  'arguments': self.arguments}
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
saml2_auth.py (project: rekall-agent-server, author: rekall-innovations)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
objects.py (project: rekall-agent-server, author: rekall-innovations)
def _compute_fields_for_operation(self, fields, to_compute):
        row = OpRow(self)
        for name, tup in iteritems(fields):
            field, value = tup
            if isinstance(
                value, (
                    types.LambdaType, types.FunctionType, types.MethodType,
                    types.BuiltinFunctionType, types.BuiltinMethodType
                )
            ):
                value = value()
            row.set_value(name, value, field)
        for name, field in to_compute:
            try:
                row.set_value(name, field.compute(row), field)
            except (KeyError, AttributeError):
                # error silently unless field is required!
                if field.required and name not in fields:
                    raise RuntimeError(
                        'unable to compute required field: %s' % name)
        return row
saml2_auth.py (project: slugiot-client, author: slugiot)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
core.py (project: keras, author: NVIDIA)
def get_config(self):
        if isinstance(self.function, python_types.LambdaType):
            function = func_dump(self.function)
            function_type = 'lambda'
        else:
            function = self.function.__name__
            function_type = 'function'

        if isinstance(self._output_shape, python_types.LambdaType):
            output_shape = func_dump(self._output_shape)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        config = {'function': function,
                  'function_type': function_type,
                  'output_shape': output_shape,
                  'output_shape_type': output_shape_type,
                  'arguments': self.arguments}
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
core.py (project: keras_superpixel_pooling, author: parag2489)
def get_config(self):
        if isinstance(self.function, python_types.LambdaType):
            function = func_dump(self.function)
            function_type = 'lambda'
        else:
            function = self.function.__name__
            function_type = 'function'

        if isinstance(self._output_shape, python_types.LambdaType):
            output_shape = func_dump(self._output_shape)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        config = {'function': function,
                  'function_type': function_type,
                  'output_shape': output_shape,
                  'output_shape_type': output_shape_type,
                  'arguments': self.arguments}
        base_config = super(Lambda, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
saml2_auth.py (project: StuffShare, author: StuffShare)
def obj2dict(obj, processed=None):
    """                                                                        
    converts any object into a dict, recursively                               
    """
    processed = processed if not processed is None else set()
    if obj is None:
        return None
    if isinstance(obj,(int,long,str,unicode,float,bool)):
        return obj
    if id(obj) in processed:
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj,(list,tuple)):
        return [obj2dict(item,processed) for item in obj]
    if not isinstance(obj, dict) and hasattr(obj,'__dict__'):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict((key,obj2dict(value,processed)) for key,value in obj.items()
                if not key.startswith('_') and
                not type(value) in (types.FunctionType,
                                    types.LambdaType,
                                    types.BuiltinFunctionType,
                                    types.BuiltinMethodType))
clauses.py (project: speccer, author: bensimner)
def _get_name_from_func(func, other):
    if not isinstance(func, types.LambdaType):
        with contextlib.suppress(AttributeError):
            return func.__qualname__

        with contextlib.suppress(AttributeError):
            return func.__name__

    return other
recipe-577825.py (project: code, author: ActiveState)
def __setstate__(self, state):
        "Sets the state of the _View instance when unpickled."
        database, query, name_changes = state
        self.__database = database
        self.__query = types.LambdaType(query, sys.modules, '', (), ())
        self.__name_changes = name_changes

    ########################################################################
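In this recipe types.LambdaType is used as a constructor: since it is the function type itself, calling it with a code object (plus globals, a name, defaults, and a closure) rebuilds a function, which is how the pickled query is restored. A minimal sketch of the same idea; the original function below is only an illustration, not part of the recipe:

import types

def original(x):
    return x + 1

# Rebuild a callable from an existing code object.
rebuilt = types.LambdaType(original.__code__, globals(), 'rebuilt')
print(rebuilt(41))  # 42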
view.py (project: hostapd-mana, author: adde88)
def getSubview(self, request, node, model, viewName):
        """Get a sub-view from me.

        @returns: L{widgets.Widget}
        """
        view = None
        vm = getattr(self, 'wvfactory_' + viewName, None)
        if vm is None:
            vm = getattr(self, 'factory_' + viewName, None)
            if vm is not None:
                warnings.warn("factory_ methods are deprecated; please use "
                              "wvfactory_ instead", DeprecationWarning)
        if vm:
            if vm.func_code.co_argcount == 3 and not type(vm) == types.LambdaType:
                 warnings.warn("wvfactory_ methods take (request, node, "
                               "model) instead of (request, node) now. \n"
                               "Please instantiate your widgets with a "
                               "reference to model instead of self.model",
                               DeprecationWarning)
                 self.model = model
                 view = vm(request, node)
                 self.model = self.mainModel
            else:
                view = vm(request, node, model)

        setupMethod = getattr(self, 'wvupdate_' + viewName, None)
        if setupMethod:
            if view is None:
                view = widgets.Widget(model)
            view.setupMethods.append(setupMethod)
        return view
augmentations.py (project: ssd.pytorch, author: amdegroot)
def __init__(self, lambd):
        assert isinstance(lambd, types.LambdaType)
        self.lambd = lambd
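For context, a self-contained sketch of how such a lambda-wrapping transform is typically used; the __call__ method and the example call below are assumptions, not the ssd.pytorch implementation. Note that the assert also accepts ordinary def functions, because LambdaType is the general function type:

import types

class Lambda(object):
    """Applies an arbitrary user-supplied callable as a transform."""

    def __init__(self, lambd):
        assert isinstance(lambd, types.LambdaType)
        self.lambd = lambd

    def __call__(self, img):
        return self.lambd(img)

double = Lambda(lambda img: img * 2)
print(double(21))  # 42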
test_transformation_creator.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test_build_lambda(self):
        mult_syntax_tree = SyntaxTree()
        mult_syntax_tree.operation = "mult"
        mult_syntax_tree.children = ["packet_size", "sampling_rate"]

        parsed_transformations = ["src_ip", FieldTransformation("destination_ip", "dst_ip"),
                                  FieldTransformation("traffic", mult_syntax_tree)]

        creator = TransformationCreator(self.data_structure, parsed_transformations, TransformationOperations({
            "country": "./GeoLite2-Country.mmdb",
            "city": "./GeoLite2-City.mmdb",
            "asn": "./GeoLite2-ASN.mmdb"
        }))

        transformation = creator.build_lambda()

        self.assertIsInstance(transformation, types.LambdaType, "Transformation type should be lambda")

        spark = SparkSession.builder.getOrCreate()
        file = spark.read.csv(DATA_PATH, self.data_structure_pyspark)

        result = file.rdd.map(transformation)

        result = result.collect()

        self.assertListEqual(result, [("217.69.143.60", "91.221.61.183", 37888),
                                      ("91.221.61.168", "90.188.114.141", 34816),
                                      ("91.226.13.80", "5.136.78.36", 773120),
                                      ("192.168.30.2", "192.168.30.1", 94720),
                                      ("192.168.30.2", "192.168.30.1", 94720)], "List of tuples should be equal")

        spark.stop()
test_transformation_creator.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test_build_lambda_with_nested_operations(self):
        mult_syntax_tree = SyntaxTree()
        mult_syntax_tree.operation = "mult"
        mult_syntax_tree.children = ["packet_size", "sampling_rate"]

        root_mult_st = SyntaxTree()
        root_mult_st.operation = "mult"
        root_mult_st.children = [mult_syntax_tree, "10"]

        parsed_transformations = ["src_ip", FieldTransformation("destination_ip", "dst_ip"),
                                  FieldTransformation("traffic", root_mult_st)]

        creator = TransformationCreator(self.data_structure, parsed_transformations, TransformationOperations({
            "country": "./GeoLite2-Country.mmdb",
            "city": "./GeoLite2-City.mmdb",
            "asn": "./GeoLite2-ASN.mmdb"
        }))

        transformation = creator.build_lambda()

        self.assertIsInstance(transformation, types.LambdaType, "Transformation type should be lambda")

        spark = SparkSession.builder.getOrCreate()
        file = spark.read.csv(DATA_PATH, self.data_structure_pyspark)

        result = file.rdd.map(transformation)

        result = result.collect()

        self.assertListEqual(result, [("217.69.143.60", "91.221.61.183", 378880),
                                      ("91.221.61.168", "90.188.114.141", 348160),
                                      ("91.226.13.80", "5.136.78.36", 7731200),
                                      ("192.168.30.2", "192.168.30.1", 947200),
                                      ("192.168.30.2", "192.168.30.1", 947200)],
                             "List of tuples should be equal")

        spark.stop()
test_processor.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test__init__(self):
        config = Config(CONFIG_PATH)
        p = Processor(config)
        self.assertIsInstance(p.transformation, types.LambdaType, "Processor#transformation should be a lambda object")
test_analysis.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test_get_analysis_lambda_for_reduce(self, mock_analysis_record):
        # set up input data structure obtained after transformation and aggregation
        input_data_structure = {'rule': [{'key': False, 'func_name': 'Max', 'input_field': 'traffic'},
                                         {'key': False, 'func_name': 'Max', 'input_field': 'ip_size'},
                                         {'key': False, 'func_name': 'Sum', 'input_field': 'ip_size_sum'}],
                                'operation_type': 'reduceByKey'}
        # set up structure of config
        config = TestConfig(
            {
                "historical": {
                    "method": "influx",
                    "influx_options": {
                        "measurement": "mock"
                    }
                },
                "alert": {
                    "method": "stdout",
                    "option": {}
                },
                "time_delta": 20,
                "accuracy": 3,
                "rule": {
                    "ip_size": 5,
                    "ip_size_sum": 10,
                    "traffic": 15
                }

            })

        detection = Analysis(config.content, Mock(), Mock(),
                             input_data_structure)
        lambda_analysis = detection.get_analysis_lambda()
        self.assertIsInstance(lambda_analysis, types.LambdaType,
                              "Failed. get_analysis_lambda should return a lambda object")

        lambda_analysis((3, 4, 5, 4))
        self.assertTrue(mock_analysis_record.called,
                        "Failed. The analysis_record didn't call in lambda that returned by get_analysis_lambda.")
test_analysisFactory.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test_get_analysis_lambda_and_analysis_lambda_reduce(self, mock_data_delivery, mock_alert_factory,
                                                            mock_historical_data):
        input_data_structure = {'rule': [{'key': False, 'func_name': 'Max', 'input_field': 'traffic'},
                                         {'key': False, 'func_name': 'Max', 'input_field': 'ip_size'},
                                         {'key': False, 'func_name': 'Sum', 'input_field': 'ip_size_sum'}],
                                'operation_type': 'reduceByKey'}
        enumerate_output_aggregation_field = {"traffic": 0, "ip_size": 1, "ip_size_sum": 2}

        mock_class = MagicMock()
        mock_analysis = MagicMock()
        mock_class.MockAnalysis.return_value = mock_analysis
        mock_analysis.analysis = MagicMock()
        sys.modules['analysis.MockAnalysis'] = mock_class
        mock_history_data_singleton = MagicMock()
        mock_data_delivery.return_value = mock_history_data_singleton
        obj_mock_alert_factory = MagicMock()
        mock_alert_factory.return_value = obj_mock_alert_factory
        obj_mock_historical_data = MagicMock()
        mock_historical_data.return_value = obj_mock_historical_data

        analysis_factory = AnalysisFactory(self._config, input_data_structure, enumerate_output_aggregation_field)

        test_lambda = analysis_factory.get_analysis_lambda()

        self.assertIsInstance(test_lambda, types.LambdaType, "get_analysis_lambda should return lambda function")

        test_lambda((6666, 7777, 8888))

        mock_analysis.analysis.assert_called_with(obj_mock_historical_data, obj_mock_alert_factory)
test_analysisFactory.py (project: kafka-spark-influx-csv-analysis, author: bwsw)
def test_get_analysis_lambda_and_analysis_lambda_reduceByKey(self, mock_data_delivery, mock_alert_factory,
                                                                 mock_historical_data, mock_rdd):
        input_data_structure = {'rule': [{'key': True, 'func_name': '', 'input_field': 'ip'},
                                         {'key': False, 'func_name': 'Max', 'input_field': 'ip_size'},
                                         {'key': False, 'func_name': 'Sum', 'input_field': 'ip_size_sum'}],
                                'operation_type': 'reduceByKey'}
        enumerate_output_aggregation_field = {"ip_size": 1, "ip_size_sum": 2}

        def mock_foreachPartition(test_lambda):
            test_data = [(1, 2, 3)]
            return list(test_lambda(test_data))

        mock_rdd.foreachPartition.side_effect = mock_foreachPartition

        mock_class = MagicMock()
        mock_analysis = MagicMock()
        mock_class.MockAnalysis.return_value = mock_analysis
        mock_analysis.analysis = MagicMock()
        sys.modules['analysis.MockAnalysis'] = mock_class
        mock_history_data_singleton = MagicMock()
        mock_data_delivery.return_value = mock_history_data_singleton
        obj_mock_alert_factory = MagicMock()
        mock_alert_factory.return_value = obj_mock_alert_factory
        obj_mock_historical_data = MagicMock()
        mock_historical_data.return_value = obj_mock_historical_data

        analysis_factory = AnalysisFactory(self._config, input_data_structure, enumerate_output_aggregation_field)

        test_lambda = analysis_factory.get_analysis_lambda()

        self.assertIsInstance(test_lambda, types.LambdaType, "get_analysis_lambda should return lambda function")

        test_lambda(mock_rdd)

        self.assertTrue(mock_rdd.foreachPartition.called,
                        "Failed. The foreachPartition didn't call in lambda that returned by get_analysis_lambda.")

        mock_analysis.analysis.assert_called_with(obj_mock_historical_data, obj_mock_alert_factory)
topology.py (project: keras, author: GeekLiB)
def get_config(self):
        if isinstance(self.mode, python_types.LambdaType):
            mode = func_dump(self.mode)
            mode_type = 'lambda'
        elif callable(self.mode):
            mode = self.mode.__name__
            mode_type = 'function'
        else:
            mode = self.mode
            mode_type = 'raw'

        if isinstance(self._output_shape, python_types.LambdaType):
            output_shape = func_dump(self._output_shape)
            output_shape_type = 'lambda'
        elif callable(self._output_shape):
            output_shape = self._output_shape.__name__
            output_shape_type = 'function'
        else:
            output_shape = self._output_shape
            output_shape_type = 'raw'

        if isinstance(self._output_mask, python_types.LambdaType):
            output_mask = func_dump(self._output_mask)
            output_mask_type = 'lambda'
        elif callable(self._output_mask):
            output_mask = self._output_mask.__name__
            output_mask_type = 'function'
        else:
            output_mask = self._output_mask
            output_mask_type = 'raw'

        return {'name': self.name,
                'mode': mode,
                'mode_type': mode_type,
                'concat_axis': self.concat_axis,
                'dot_axes': self.dot_axes,
                'output_shape': output_shape,
                'output_shape_type': output_shape_type,
                'output_mask': output_mask,
                'output_mask_type': output_mask_type,
                'arguments': self.arguments}
xml_types.py (project: third_person_im, author: bstadie)
def _extract_type(typ):
    if isinstance(typ, LambdaType):
        return typ()
    else:
        return typ
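A short usage sketch: _extract_type accepts a type either directly or wrapped in a zero-argument lambda and normalizes both to the bare type. Because LambdaType also matches named functions, any function passed here would be called too.

print(_extract_type(int))           # the int type, returned unchanged
print(_extract_type(lambda: dict))  # the dict type: the lambda is called and its result returned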

