
Python containers.create_unique_identifier Function Code Examples


This article collects typical usage examples of the Python function pyon.util.containers.create_unique_identifier. If you have been wondering what create_unique_identifier does, how to call it, or what idiomatic uses look like, the curated examples below should help.



The following presents 18 code examples of the create_unique_identifier function, ordered by popularity by default.
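Before the examples, a quick orientation: judging from the unit test in Example 11 below, create_unique_identifier takes an optional string prefix and returns an identifier string that contains the prefix when one is given. A minimal sketch of a compatible implementation follows; the real pyon version may differ in separator and suffix format.

    # A minimal sketch compatible with the test in Example 11; the actual pyon
    # implementation may use a different separator or suffix format.
    import uuid

    def create_unique_identifier(prefix=''):
        if prefix:
            return '%s_%s' % (prefix, uuid.uuid4().hex)
        return uuid.uuid4().hex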

Example 1: _start_data_subscribers

    def _start_data_subscribers(self, count, raw_count):
        """Create subscribers for the driver's parsed and raw data streams."""
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
                
        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._raw_samples_received = []
        self._async_sample_result = AsyncResult()
        self._async_raw_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received parsed data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)
            if len(self._samples_received) == count:
                self._async_sample_result.set()

        def recv_raw_data(message, stream_route, stream_id):
            log.info('Received raw data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._raw_samples_received.append(message)
            if len(self._raw_samples_received) == raw_count:
                self._async_raw_sample_result.set()

        from pyon.util.containers import create_unique_identifier

        stream_name = 'parsed'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" % stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)

        stream_name = 'raw'
        raw_config = self._stream_config[stream_name]
        stream_id = raw_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" % stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_raw_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Developer: ednad | Project: coi-services | Lines: 53 | Source: test_vel3d.py
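The 'parsed' and 'raw' blocks above are near-identical; a loop over the two stream names, in the style of Example 7 below, removes the duplication. A condensed sketch, assuming the same self._stream_config layout and helper methods:

    # Condensed form of the two blocks above (same assumptions about
    # self._stream_config and the helper methods).
    for stream_name, recv_cb in (('parsed', recv_data), ('raw', recv_raw_data)):
        stream_id = self._stream_config[stream_name]['stream_id']
        exchange_name = create_unique_identifier('%s_queue' % stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_cb)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id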


Example 2: create_event_process_definition

    def create_event_process_definition(self, version='', module='', class_name='', uri='', arguments=None, event_types = None, sub_types = None, origin_types = None):
        """
        Create a resource which defines the processing of events.

        @param version str
        @param module str
        @param class_name str
        @param uri str
        @param arguments list
        @param event_types list
        @param sub_types list
        @param origin_types list

        @return procdef_id str
        """

        # Create the event process detail object
        event_process_definition_detail = EventProcessDefinitionDetail()
        event_process_definition_detail.event_types = event_types
        event_process_definition_detail.sub_types = sub_types
        event_process_definition_detail.origin_types = origin_types

        # Create the process definition
        process_definition = ProcessDefinition(name=create_unique_identifier('event_process'))
        process_definition.executable = {
            'module':module,
            'class': class_name,
            'url': uri
        }
        process_definition.version = version
        process_definition.arguments = arguments
        process_definition.definition = event_process_definition_detail

        procdef_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

        return procdef_id
Developer: Bobfrat | Project: coi-services | Lines: 33 | Source: event_management_service.py


Example 3: create_stream_definition

    def create_stream_definition(self, name='', parameter_dictionary=None, parameter_dictionary_id='', stream_type='', description='', available_fields=None, stream_configuration=None):
        parameter_dictionary = parameter_dictionary or {}
        stream_configuration = stream_configuration or {}
        existing = self.clients.resource_registry.find_resources(restype=RT.StreamDefinition, name=name, id_only=True)[0]
        if name and existing:
            if parameter_dictionary_id:
                pdict_ids, _ = self.clients.resource_registry.find_objects(subject=existing[0], predicate=PRED.hasParameterDictionary, id_only=True)
                if pdict_ids and parameter_dictionary_id==pdict_ids[0]:
                    return existing[0]
                else:
                    raise Conflict('StreamDefinition with the specified name already exists. (%s)' % name)

            stream_def = self.read_stream_definition(existing[0])
            if self._compare_pdicts(parameter_dictionary,stream_def.parameter_dictionary):
                return existing[0]
            raise Conflict('StreamDefinition with the specified name already exists. (%s)' % name)

        name = name or create_unique_identifier()

        stream_definition = StreamDefinition(parameter_dictionary=parameter_dictionary, stream_type=stream_type, name=name, description=description, available_fields=available_fields, stream_configuration=stream_configuration)
        stream_definition_id,_  = self.clients.resource_registry.create(stream_definition)
        if parameter_dictionary_id:
            self._associate_pdict_with_definition(parameter_dictionary_id, stream_definition_id)

        return stream_definition_id
Developer: Bobfrat | Project: coi-services | Lines: 25 | Source: pubsub_management_service.py


Example 4: create_stream

    def create_stream(self, name='', exchange_point='', topic_ids=None, credentials=None, stream_definition_id='', description='', stream_name='', stream_type=''):
        # Argument Validation
        if name and self.clients.resource_registry.find_resources(restype=RT.Stream, name=name, id_only=True)[0]:
            raise Conflict("The named stream '%s' already exists on XP '%s'" % (name, exchange_point))
        validate_true(exchange_point, 'An exchange point must be specified')

        exchange_point_id = None
        if re.match(r'[0-9a-f]{32}', exchange_point):  # It's a uuid
            xp_obj = self.clients.exchange_management.read_exchange_point(exchange_point)
            exchange_point_id = exchange_point
            exchange_point = xp_obj.name
        else:
            self.container.ex_manager.create_xp(exchange_point)
            xp_objs, _ = self.clients.resource_registry.find_resources(restype=RT.ExchangePoint, name=exchange_point, id_only=True)
            if not xp_objs:
                raise BadRequest('failed to create an ExchangePoint: ' + exchange_point)
            exchange_point_id = xp_objs[0]

        topic_ids = topic_ids or []

        if not name: name = create_unique_identifier()

        # Get topic names and topics
        topic_names = []
        associated_topics = []
        for topic_id in topic_ids:
            topic = self.read_topic(topic_id)
            if topic.exchange_point == exchange_point:
                topic_names.append(self._sanitize(topic.name))
                associated_topics.append(topic_id)
            else:
                log.warning('Attempted to attach stream %s to topic %s with different exchange points', name, topic.name)

        stream = Stream(name=name, description=description)
        routing_key = '.'.join([self._sanitize(name)] + topic_names + ['stream'])
        if len(routing_key) > 255:
            raise BadRequest('The routing key would exceed 255 characters; too many topics for this stream.')

        stream.stream_route.exchange_point = exchange_point
        stream.stream_route.routing_key = routing_key
        #@todo: validate credentials
        stream.stream_route.credentials = credentials
        stream.stream_name = stream_name
        stream.stream_type = stream_type

        stream_id, rev = self.clients.resource_registry.create(stream)

        self._associate_stream_with_exchange_point(stream_id,exchange_point_id)

        if stream_definition_id: #@Todo: what if the stream has no definition?!
            self._associate_stream_with_definition(stream_id, stream_definition_id)

        for topic_id in associated_topics:
            self._associate_topic_with_stream(topic_id, stream_id)

        log.info('Stream %s: %s', name, routing_key)

        return stream_id, stream.stream_route
Developer: birdage | Project: coi-services | Lines: 58 | Source: pubsub_management_service.py
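For concreteness, the routing key assembled in create_stream is the sanitized stream name joined with its topic names and a trailing 'stream' token. A hypothetical illustration (the names are made up, and _sanitize is approximated with a simple whitespace replacement):

    # Hypothetical names; _sanitize is approximated with a simple replace.
    name = 'ctd parsed'
    topic_names = ['science_data']
    routing_key = '.'.join([name.replace(' ', '_')] + topic_names + ['stream'])
    # -> 'ctd_parsed.science_data.stream' (AMQP limits routing keys to 255 bytes)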


Example 5: create_topic

    def create_topic(self, name='', exchange_point='', parent_topic_id='', description=''):
        validate_true(exchange_point, 'An exchange point must be provided for the topic')
        name = name or create_unique_identifier()
        topic = Topic(name=name, description=description, exchange_point=exchange_point)
        if parent_topic_id:
            parent_topic = self.read_topic(parent_topic_id)
            validate_equal(exchange_point, parent_topic.exchange_point, 'Can not make a sub-topic with a different exchange point')
        topic_id, rev = self.clients.resource_registry.create(topic)

        if parent_topic_id:
            self._associate_topic_with_topic(parent_topic_id, topic_id)

        return topic_id
Developer: kerfoot | Project: coi-services | Lines: 13 | Source: pubsub_management_service.py


Example 6: create_subscription

    def create_subscription(self, name='', stream_ids=None, exchange_points=None, topic_ids=None, exchange_name='', credentials=None, description='', data_product_ids=None):
        stream_ids       = stream_ids or []
        exchange_points  = exchange_points or []
        topic_ids        = topic_ids or []
        data_product_ids = data_product_ids or []

        exchange_name = exchange_name or name
        validate_true(exchange_name, 'Clients must provide an exchange name')
        log.info('Creating Subscription %s for %s <- %s', name, exchange_name, stream_ids or exchange_points or topic_ids)

        if not name: name = create_unique_identifier()

        if stream_ids:
            validate_is_instance(stream_ids, list, 'stream ids must be in list format')

        if exchange_points:
            validate_is_instance(exchange_points, list, 'exchange points must be in list format')

        if topic_ids:
            validate_is_instance(topic_ids, list, 'topic ids must be in list format')


        subscription = Subscription(name=name, description=description)
        subscription.exchange_points = exchange_points
        subscription.exchange_name   = exchange_name

        subscription_id, rev = self.clients.resource_registry.create(subscription)
        self.container.ex_manager.create_xn_queue(exchange_name)
        xn_ids, _ = self.clients.resource_registry.find_resources(restype=RT.ExchangeName, name=exchange_name, id_only=True)
        if xn_ids:
            xn_id = xn_ids[0]
            self.clients.resource_registry.create_association(xn_id, PRED.hasSubscription, subscription_id)

        #---------------------------------
        # Associations
        #---------------------------------

        for stream_id in stream_ids:
            self._associate_stream_with_subscription(stream_id, subscription_id)
        
        for topic_id in topic_ids:
            self._associate_topic_with_subscription(topic_id, subscription_id)

        for data_product_id in data_product_ids:
            self._associate_data_product_with_subscription(data_product_id, subscription_id)
        
        return subscription_id
Developer: Bobfrat | Project: coi-services | Lines: 47 | Source: pubsub_management_service.py


Example 7: _start_data_subscribers

    def _start_data_subscribers(self, count):
        """Create a subscriber for each stream named in the driver config."""
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._async_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info("Received message on %s (%s,%s)", stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)
            if len(self._samples_received) == count:
                self._async_sample_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():

            stream_id = stream_config["stream_id"]

            # Create subscriptions for each stream.

            from pyon.util.containers import create_unique_identifier

            # exchange_name = '%s_queue' % stream_name
            exchange_name = create_unique_identifier("%s_queue" % stream_name)
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self._data_subscribers.append(sub)
            print "stream_id: %s" % stream_id
            sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Developer: blazetopher | Project: coi-services | Lines: 38 | Source: test_ia_alarms.py


Example 8: create_subscription

    def create_subscription(self, name='', stream_ids=None, exchange_points=None, topic_ids=None, exchange_name='', credentials=None, description=''):
        stream_ids      = stream_ids or []
        exchange_points = exchange_points or []
        topic_ids       = topic_ids or []

        exchange_name = exchange_name or name
        validate_true(exchange_name, 'Clients must provide an exchange name')
        log.info('Creating Subscription %s for %s <- %s', name, exchange_name, stream_ids or exchange_points or topic_ids)

        if not name: name = create_unique_identifier()

        if stream_ids:
            validate_is_instance(stream_ids, list, 'stream ids must be in list format')

        if exchange_points:
            validate_is_instance(exchange_points, list, 'exchange points must be in list format')

        if topic_ids:
            validate_is_instance(topic_ids, list, 'topic ids must be in list format')


        subscription = Subscription(name=name, description=description)
        subscription.exchange_points = exchange_points
        subscription.exchange_name   = exchange_name

        subscription_id, rev = self.clients.resource_registry.create(subscription)

        #---------------------------------
        # Associations
        #---------------------------------
        
        for stream_id in stream_ids:
            self._associate_stream_with_subscription(stream_id, subscription_id)
        
        for topic_id in topic_ids:
            self._associate_topic_with_subscription(topic_id, subscription_id)
        
        return subscription_id
Developer: kerfoot | Project: coi-services | Lines: 38 | Source: pubsub_management_service.py


Example 9: template_tst_deployment_context

    def template_tst_deployment_context(self, context=None):
        """
        Creates a minimal deployment: 1 instrument, 1 site. A deployment context must be provided.
        """
        c = self.client

        c2 = DotDict()
        c2.resource_registry = self.client.RR

        log.info("Create a instrument model")
        instrument_model_id = self.perform_fcruf_script(RT.InstrumentModel,
                                                      "instrument_model",
                                                      self.client.IMS)

        log.info("Create an instrument device")
        instrument_device_id = self.perform_fcruf_script(RT.InstrumentDevice,
                                                         "instrument_device",
                                                         self.client.IMS)

        log.info("Create instrument site")
        instrument_site_id = self.perform_fcruf_script(RT.InstrumentSite,
                                                       "instrument_site",
                                                       self.client.OMS)

        log.info("Associate instrument model with instrument site")
        self.perform_association_script(c.OMS.assign_instrument_model_to_instrument_site,
                                        self.RR2.find_instrument_sites_by_instrument_model_using_has_model,
                                        self.RR2.find_instrument_models_of_instrument_site_using_has_model,
                                        instrument_site_id,
                                        instrument_model_id)


        log.info("Associate instrument model with instrument device")
        self.perform_association_script(c.IMS.assign_instrument_model_to_instrument_device,
                                        c.IMS.find_instrument_device_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_device,
                                        instrument_device_id,
                                        instrument_model_id)


        log.info("Create a stream definition for the data from the ctd simulator")
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.client.PSMS.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        log.info("Create an IonObject for a data products")
        dp_obj = self.create_data_product_obj()

        dp_obj.name = create_unique_identifier('Inst Data Product')
        inst_data_product_id = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id)


        #assign data products appropriately
        c.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                   data_product_id=inst_data_product_id)


        deployment_obj = any_old(RT.Deployment, dict(context=context))
        deployment_id = c.OMS.create_deployment(deployment_obj)

        c.OMS.assign_site_to_deployment(instrument_site_id, deployment_id)
        c.OMS.assign_device_to_deployment(instrument_device_id, deployment_id)

        c.OMS.activate_deployment(deployment_id, True)

        # cleanup
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(deployment_id)
        self.RR2.pluck(instrument_device_id)
        c.IMS.force_delete_instrument_model(instrument_model_id)
        c.IMS.force_delete_instrument_device(instrument_device_id)
        c.OMS.force_delete_instrument_site(instrument_site_id)
        c.OMS.force_delete_deployment(deployment_id)
Developer: birdage | Project: coi-services | Lines: 72 | Source: test_assembly.py


Example 10: create_data_process_workflow

    def create_data_process_workflow(self, workflow_definition_id='', input_data_product_id=''):
        """Instantiates a Data Process Workflow specified by a Workflow Definition resource and an input data product id.
      Returns the id of the workflow and the data product id for the final output product.

        @param workflow_definition_id    str
        @param input_data_product_id    str
        @retval workflow_id str
        @retval output_data_product_id    str
        @throws BadRequest    if any of the required parameters are not set
        @throws NotFound    object with specified id does not exist
        """

        if not workflow_definition_id:
            raise BadRequest("The workflow_definition_id parameter is missing")

        workflow_definition = self.clients.resource_registry.read(workflow_definition_id)
        if not workflow_definition:
            raise NotFound("WorkflowDefinition %s does not exist" % workflow_definition_id)

        if not input_data_product_id:
            raise BadRequest("The input_data_product_id parameter is missing")

        input_data_product = self.clients.resource_registry.read(input_data_product_id)
        if not input_data_product:
            raise NotFound("The input data product %s does not exist" % input_data_product_id)


        #Create Workflow object and associations to track the instantiation of a work flow definition.
        workflow = IonObject(RT.Workflow, name=workflow_definition.name)
        workflow_id, _ = self.clients.resource_registry.create(workflow)
        self.clients.resource_registry.create_association(workflow_id, PRED.hasDefinition,workflow_definition_id )
        self.clients.resource_registry.create_association(workflow_id, PRED.hasInputProduct,input_data_product_id )

        #Setup the input data product id as the initial input product stream
        data_process_input_dp_id = input_data_product_id

        output_data_product_id = None

        #Iterate through the workflow steps to setup the data processes and connect them together.
        for wf_step in workflow_definition.workflow_steps:
            log.debug("wf_step.data_process_definition_id: " + wf_step.data_process_definition_id)

            data_process_definition = self.clients.resource_registry.read(wf_step.data_process_definition_id)

            # Find the link between the output Stream Definition resource and the Data Process Definition resource
            stream_ids,_ = self.clients.resource_registry.find_objects(data_process_definition._id, PRED.hasStreamDefinition, RT.StreamDefinition,  id_only=True)
            if not stream_ids:
                raise Inconsistent("The data process definition %s is missing an association to an output stream definition" % data_process_definition._id )
            process_output_stream_def_id = stream_ids[0]

            #If an output name has been specified then use it for the final output product name
            if wf_step.output_data_product_name != '':
                data_product_name = wf_step.output_data_product_name
            else:
                #Concatenate the workflow and data process definition names for the output data product name,
                #plus a unique identifier for multiple instances of a workflow definition.
                data_product_name = create_unique_identifier(workflow_definition.name + '_' + data_process_definition.name)

            # Create the output data product of the transform
            transform_dp_obj = IonObject(RT.DataProduct, name=data_product_name,description=data_process_definition.description)
            transform_dp_id = self.clients.data_product_management.create_data_product(transform_dp_obj, process_output_stream_def_id)
            if wf_step.persist_process_output_data:
                self.clients.data_product_management.activate_data_product_persistence(data_product_id=transform_dp_id, persist_data=wf_step.persist_process_output_data, persist_metadata=wf_step.persist_process_output_data)

            #Associate the intermediate data products with the workflow
            self.clients.resource_registry.create_association(workflow_id, PRED.hasDataProduct, transform_dp_id )

            # Create the  transform data process
            log.debug("create data_process and start it")
            data_process_id = self.clients.data_process_management.create_data_process(data_process_definition._id, [data_process_input_dp_id], {'output':transform_dp_id}, configuration=wf_step.configuration)
            self.clients.data_process_management.activate_data_process(data_process_id)

            #Track the data process with an association to the workflow
            self.clients.resource_registry.create_association(workflow_id, PRED.hasDataProcess, data_process_id )

            #last one out of the for loop is the output product id
            output_data_product_id = transform_dp_id

            #Save the id of the output data stream for input to the next process in the workflow.
            data_process_input_dp_id = transform_dp_id


        #Track the output data product with an association
        self.clients.resource_registry.create_association(workflow_id, PRED.hasOutputProduct, output_data_product_id )

        return workflow_id, output_data_product_id
Developer: ooici-eoi | Project: coi-services | Lines: 86 | Source: workflow_management_service.py


Example 11: test_create_unique_identifier

    def test_create_unique_identifier(self):
        id = create_unique_identifier("abc123")
        self.assertIn("abc123", id)
        id = create_unique_identifier()
        self.assertNotIn("abc123", id)
Developer: pkediyal | Project: pyon | Lines: 5 | Source: test_containers.py


Example 12: initiate_realtime_visualization

    def initiate_realtime_visualization(self, data_product_id='', visualization_parameters=None, callback=""):
        """Initial request required to start a realtime chart for the specified data product. Returns a user specific token associated
        with the request that will be required for subsequent requests when polling data.
        
        Note : The in_product_type is a temporary fix for specifying the type of data product to the the service

        @param data_product_id    str
        @param in_product_type str
        @param query        str
        @param callback     str
        @throws NotFound    Throws if specified data product id or its visualization product does not exist
        """


        query = None
        if visualization_parameters:
            if 'query' in visualization_parameters:
                query=visualization_parameters['query']

        # Perform a look up to check and see if the DP is indeed a realtime GDT stream
        if not data_product_id:
            raise BadRequest("The data_product_id parameter is missing")
        data_product = self.clients.resource_registry.read(data_product_id)
        
        if not data_product:
            raise NotFound("Data product %s does not exist" % data_product_id)

        data_product_stream_id = None
        workflow_def_id = None

        # Check to see if the workflow definition already exists
        workflow_def_ids,_ = self.clients.resource_registry.find_resources(restype=RT.WorkflowDefinition, name='Realtime_Google_DT', id_only=True)

        if len(workflow_def_ids) > 0:
            workflow_def_id = workflow_def_ids[0]
        else:
            workflow_def_id = self._create_google_dt_workflow_def()

        #Create and start the workflow
        workflow_id, workflow_product_id = self.clients.workflow_management.create_data_process_workflow(workflow_def_id, data_product_id, timeout=20)

        # detect the output data product of the workflow
        workflow_dp_ids,_ = self.clients.resource_registry.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        if len(workflow_dp_ids) != 1:
            raise ValueError("Workflow Data Product ids representing output DP must contain exactly one entry")

        # find associated stream id with the output
        workflow_output_stream_ids, _ = self.clients.resource_registry.find_objects(workflow_dp_ids[-1], PRED.hasStream, None, True)
        data_product_stream_id = workflow_output_stream_ids

        # Create a queue to collect the stream granules - idempotency saves the day!
        query_token = create_unique_identifier('user_queue')

        xq = self.container.ex_manager.create_xn_queue(query_token)
        subscription_id = self.clients.pubsub_management.create_subscription(
            stream_ids=data_product_stream_id,
            exchange_name = query_token,
            name = query_token
        )

        # after the queue has been created it is safe to activate the subscription
        self.clients.pubsub_management.activate_subscription(subscription_id)

        if callback == "":
            return query_token
        else:
            return callback + "(\"" + query_token + "\")"
Developer: kerfoot | Project: coi-services | Lines: 67 | Source: visualization_service.py
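Given the token scheme above, the service returns either the bare queue token (which doubles as the queue/exchange name) or, when a callback name is supplied, a JSONP-style wrapped call. A hypothetical illustration (vis_client and dp_id are assumed names, not from the source):

    # Hypothetical client handle and data product id.
    token = vis_client.initiate_realtime_visualization(data_product_id=dp_id)
    # -> e.g. 'user_queue_ab12cd...'
    wrapped = vis_client.initiate_realtime_visualization(data_product_id=dp_id, callback='drawChart')
    # -> 'drawChart("user_queue_ab12cd...")'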


Example 13: create_data_process

    def create_data_process(self, data_process_definition_id='', in_data_product_ids=None, out_data_products=None, configuration=None):
        """
        @param  data_process_definition_id: Object with definition of the
                    transform to apply to the input data product
        @param  in_data_product_ids: ID of the input data products
        @param  out_data_products: list of IDs of the output data products
        @retval data_process_id: ID of the newly created data process object
        """

        inform = "Input Data Product:       "+str(in_data_product_ids)+\
                 "Transformed by:           "+str(data_process_definition_id)+\
                 "To create output Product: "+str(out_data_products)
        log.debug("DataProcessManagementService:create_data_process()\n" +
                  inform)

        if configuration is None:
            configuration = {}

        if in_data_product_ids is None:
            in_data_product_ids = []

        if out_data_products is None:
            out_data_products = {}

        # Create and store a new DataProcess with the resource registry
        log.debug("DataProcessManagementService:create_data_process - Create and store a new DataProcess with the resource registry")
        data_process_def_obj = self.read_data_process_definition(data_process_definition_id)

        data_process_name = create_unique_identifier("process_" + data_process_def_obj.name)

        self.data_process = IonObject(RT.DataProcess, name=data_process_name)
        data_process_id, version = self.clients.resource_registry.create(self.data_process)
        log.debug("DataProcessManagementService:create_data_process - Create and store a new DataProcess with the resource registry  data_process_id: %s" +  str(data_process_id))


        # Associate with dataProcessDefinition
        aid = self.clients.resource_registry.create_association(data_process_id,  PRED.hasProcessDefinition, data_process_definition_id)
        log.debug("DataProcessManagementService:create_data_process assoc to data process definition: %s", str(aid))

        # Register the data process instance as a data producer with DataAcquisitionMgmtSvc
        data_producer_id = self.clients.data_acquisition_management.register_process(data_process_id)
        log.debug("DataProcessManagementService:create_data_process register process with DataAcquisitionMgmtSvc: data_producer_id: %s   (L4-CI-SA-RQ-181)", str(data_producer_id) )


        # Register each output data product with DAMS to create DataProducer links
        output_stream_dict = {}
        if out_data_products is None:
            raise BadRequest("Data Process must have output product(s) specified %s",  str(data_process_definition_id) )
        for name, out_data_product_id in out_data_products.iteritems():

            # check that the product is not already associated with a producer
            producer_ids, _ = self.clients.resource_registry.find_objects(out_data_product_id, PRED.hasDataProducer, RT.DataProducer, True)
            if producer_ids:
                raise BadRequest("Data Product should not already be associated to a DataProducer %s hasDataProducer %s", str(data_process_id), str(producer_ids[0]))

            #Assign each output Data Product to this producer resource
            out_data_product_obj = self.clients.resource_registry.read(out_data_product_id)
            if not out_data_product_obj:
                raise NotFound("Output Data Product %s does not exist" % out_data_product_id)
            # Associate with DataProcess: register as an output product for this process
            log.debug("DataProcessManagementService:create_data_process link data process %s and output out data product: %s    (L4-CI-SA-RQ-260)", str(data_process_id), str(out_data_product_id))
            self.clients.data_acquisition_management.assign_data_product(data_process_id, out_data_product_id)

            # Retrieve the id of the OUTPUT stream from the out Data Product
            stream_ids, _ = self.clients.resource_registry.find_objects(out_data_product_id, PRED.hasStream, RT.Stream, True)

            log.debug("DataProcessManagementService:create_data_process retrieve out data prod streams: %s", str(stream_ids))
            if not stream_ids:
                raise NotFound("No Stream created for output Data Product " + str(out_data_product_id))
            if len(stream_ids) != 1:
                raise BadRequest("Data Product should only have ONE stream at this time" + str(out_data_product_id))
            output_stream_dict[name] = stream_ids[0]
            log.debug("DataProcessManagementService:create_data_process -Register the data process instance as a data producer with DataAcquisitionMgmtSvc, then retrieve the id of the OUTPUT stream  out_stream_id: " +  str(output_stream_dict[name]))


        #check if the data process definition has lookup tables attached that should be passed to the transform as part of configuration
        self._find_lookup_tables(data_process_definition_id, configuration)


        #Todo: currently this is handled explicitly after creating the data product, that code then calls DMAS:assign_data_product
        log.debug("DataProcessManagementService:create_data_process associate data process workflows with source data products %s hasInputProducts  %s   (L4-CI-SA-RQ-260)", str(data_process_id), str(in_data_product_ids))
        for  in_data_product_id in in_data_product_ids:
            self.clients.resource_registry.create_association(data_process_id, PRED.hasInputProduct, in_data_product_id)

            #check if in data product is attached to an instrument, check instrumentDevice and InstrumentModel for lookup table attachments
            instdevice_ids, _ = self.clients.resource_registry.find_subjects(RT.InstrumentDevice, PRED.hasOutputProduct, in_data_product_id, True)
            for instdevice_id in instdevice_ids:
                log.debug("DataProcessManagementService:create_data_process instrument device_id assoc to the input data product of this data process: %s   (L4-CI-SA-RQ-231)", str(instdevice_id))
                self._find_lookup_tables(instdevice_id, configuration)
                instmodel_ids, _ = self.clients.resource_registry.find_objects(instdevice_id, PRED.hasModel, RT.InstrumentModel, True)
                for instmodel_id in instmodel_ids:
                    log.debug("DataProcessManagementService:create_data_process instmodel_id assoc to the instDevice: %s", str(instmodel_id))
                    self._find_lookup_tables(instmodel_id, configuration)

        # Create subscription from in_data_product, which should already be associated with a stream via the Data Producer
        in_stream_ids = []
        # get the streams associated with this IN data products
        for  in_data_product_id in in_data_product_ids:
            log.debug("DataProcessManagementService:create_data_process - get the stream associated with this IN data product")
            stream_ids, _ = self.clients.resource_registry.find_objects(in_data_product_id, PRED.hasStream, RT.Stream, True)
#......... remainder of the code omitted .........
Developer: pombredanne | Project: coi-services | Lines: 101 | Source: data_process_management_service.py


Example 14: create_data_process_workflow

    def create_data_process_workflow(self, workflow_definition_id='', input_data_product_id='', persist_workflow_data_product=True, output_data_product_name='', configuration={}):
        """Instantiates a Data Process Workflow specified by a Workflow Definition resource and an input data product id.
        Returns the id of the workflow and the data product id for the final output product.

        @param workflow_definition_id    str
        @param input_data_product_id    str
        @param persist_workflow_data_product    bool
        @param output_data_product_name    str
        @param configuration    IngestionConfiguration
        @retval workflow_id    str
        @retval output_data_product_id    str
        @throws BadRequest    if any of the required parameters are not set
        @throws NotFound    object with specified id does not exist
        """

        if not workflow_definition_id:
            raise BadRequest("The workflow_definition_id parameter is missing")

        workflow_definition = self.clients.resource_registry.read(workflow_definition_id)
        if not workflow_definition:
            raise NotFound("WorkflowDefinition %s does not exist" % workflow_definition_id)

        if not input_data_product_id:
            raise BadRequest("The input_data_product_id parameter is missing")

        input_data_product = self.clients.resource_registry.read(input_data_product_id)
        if not input_data_product:
            raise NotFound("The input data product %s does not exist" % input_data_product_id)

        if output_data_product_name:
            workflow_name = '%s_%s' % (workflow_definition.name, output_data_product_name)
        else:
            workflow_name = create_unique_identifier('workflow_%s' % (workflow_definition.name))

        #Create Workflow object and associations to track the instantiation of a workflow definition.
        workflow = IonObject(RT.Workflow, name=workflow_name, persist_process_output_data=persist_workflow_data_product,
                                output_data_product_name=output_data_product_name, configuration=configuration)
        workflow_id, _ = self.clients.resource_registry.create(workflow)
        self.clients.resource_registry.create_association(workflow_id, PRED.hasDefinition,workflow_definition_id )
        self.clients.resource_registry.create_association(workflow_id, PRED.hasInputProduct,input_data_product_id )

        #Setup the input data product id as the initial input product stream
        data_process_input_dp_id = input_data_product_id

        output_data_products = {}
        output_data_product_id = None # Overall product id to return

        #Iterate through the workflow steps to setup the data processes and connect them together.
        for wf_step in workflow_definition.workflow_steps:
            log.debug("wf_step.data_process_definition_id: %s" , wf_step.data_process_definition_id)

            data_process_definition = self.clients.resource_registry.read(wf_step.data_process_definition_id)

            for binding, stream_definition_id in data_process_definition.output_bindings.iteritems():

                #--------------------------------------------------------------------------------
                # Create an output data product for each binding/stream definition
                #--------------------------------------------------------------------------------

                #Handle the last step differently as it is the output of the workflow.
                if wf_step == workflow_definition.workflow_steps[-1] and output_data_product_name:
                    data_product_name = output_data_product_name
                else:
                    data_product_name = create_unique_identifier('%s_%s_%s' % (workflow_name, data_process_definition.name, binding))

                tdom, sdom = time_series_domain()

                data_product_obj = IonObject(RT.DataProduct, 
                                             name            = data_product_name,
                                             description     = data_process_definition.description,
                                             temporal_domain = tdom.dump(),
                                             spatial_domain  = sdom.dump())
                data_product_id = self.clients.data_product_management.create_data_product(data_product_obj, stream_definition_id=stream_definition_id)


                # Persist if necessary
                if wf_step == workflow_definition.workflow_steps[-1] and persist_workflow_data_product:
                    self.clients.data_product_management.activate_data_product_persistence(data_product_id=data_product_id)
                else:
                    if wf_step.persist_process_output_data:
                        self.clients.data_product_management.activate_data_product_persistence(data_product_id=data_product_id)


                #Associate the intermediate data products with the workflow
                self.clients.resource_registry.create_association(workflow_id, PRED.hasDataProduct, data_product_id )
                output_data_products[binding] = data_product_id

            #May have to merge configuration blocks where the workflow entries will override the configuration in a step
            if configuration:
                process_config = dict(wf_step.configuration.items() + configuration.items())
            else:
                process_config = wf_step.configuration

            data_process_id = self.clients.data_process_management.create_data_process(data_process_definition._id, [data_process_input_dp_id], output_data_products, configuration=process_config)
            self.clients.data_process_management.activate_data_process(data_process_id)

            #Track the data process with an association to the workflow
            self.clients.resource_registry.create_association(workflow_id, PRED.hasDataProcess, data_process_id )

            #last one out of the for loop is the output product id
#......... remainder of the code omitted .........
Developer: blazetopher | Project: coi-services | Lines: 101 | Source: workflow_management_service.py


Example 15: get_data_product_kml

    def get_data_product_kml(self, visualization_parameters = None):

        kml_content = ""
        ui_server = "http://localhost:3000" # This server hosts the UI and is used for creating all embedded links within KML
        #observatory_icon_file = "/static/img/r2/li.observatories.png"
        starting_altitude = "20000"
        observatory_icon_file = "/static/img/Observatory-icon1.png"

        if visualization_parameters:
            if "ui_server" in visualization_parameters:
                ui_server = visualization_parameters["ui_server"]

        # First step. Discover all Data products in the system
        dps,_ = self.clients.resource_registry.find_resources(RT.DataProduct, None, None, False)

        # Start creating the kml in memory
        # Common KML tags
        kml_content += "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
        kml_content += "<kml xmlns=\"http://www.opengis.net/kml/2.2\">\n"
        kml_content += "<Document>\n"
        kml_content += "<name>DataProduct geo-reference information, ID: " + str(create_unique_identifier('google_flush_key')) + "</name>\n"
        # define line styles. Used for polygons
        kml_content += "<Style id=\"yellowLine\">\n<LineStyle>\n<color>ff61f2f2</color>\n<width>4</width>\n</LineStyle>\n</Style>"

        # Embed the icon images. Each as a separate style
        kml_content += "<Style id=\"observatory-icon\">\n<IconStyle>\n<Icon>\n<href>"
        kml_content += ui_server
#......... remainder of the code omitted .........
