• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Python dao.store_entity函数代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中tvb.core.entities.storage.dao.store_entity函数的典型用法代码示例。如果您正苦于以下问题:Python store_entity函数的具体用法?Python store_entity怎么用?Python store_entity使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。



在下文中一共展示了store_entity函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: upgrade_file

    def upgrade_file(self, input_file_name, datatype=None):
        """
        Bring the given H5 file up to the latest data version, running every
        intermediate update script in sequence (target version comes from
        tvb.basic.config.settings.VersionSettings.DB_STRUCTURE_VERSION).

        :param input_file_name: path of the file to be upgraded
        :param datatype: optional DataType entity whose disk_size is refreshed
            in the DB after the file changes
        :return: True when an upgrade was performed and succeeded,
            False when the file was already up to date.
        """
        # Up-to-date files need no upgrade and no DB size refresh.
        if self.is_file_up_to_date(input_file_name):
            return False

        current_version = self.get_file_data_version(input_file_name)
        self.log.info("Updating from version %s , file: %s " % (current_version, input_file_name))
        for update_script in self.get_update_scripts(current_version):
            self.run_update_script(update_script, input_file=input_file_name)

        if datatype:
            # Keep the DataType's stored disk_size in sync with the upgraded file.
            datatype.disk_size = self.files_helper.compute_size_on_disk(input_file_name)
            dao.store_entity(datatype)

        return True
开发者ID:gummadhav,项目名称:tvb-framework,代码行数:25,代码来源:files_update_manager.py


示例2: stop_operation

    def stop_operation(operation_id):
        """
        Stop the thread(s) and, when present, the OS process executing the
        operation identified by `operation_id`, then mark it cancelled in DB.

        :return: True when the operation was stopped (or was not running).
        """
        operation = dao.get_operation_by_id(operation_id)
        if not operation or operation.status != model.STATUS_STARTED:
            LOGGER.warning("Operation already stopped or not found is given to stop job: %s" % operation_id)
            return True

        LOGGER.debug("Stopping operation: %s" % str(operation_id))

        ## Flag every active thread belonging to this operation to stop.
        for active_thread in CURRENT_ACTIVE_THREADS:
            if int(active_thread.operation_id) == operation_id:
                active_thread.stop()
                LOGGER.debug("Found running thread for operation: %d" % operation_id)

        ## Kill the external process, when one was recorded for this operation.
        stopped = True
        process_entity = dao.get_operation_process_for_operation(operation_id)
        if process_entity is not None:
            stopped = OperationExecutor.stop_pid(process_entity.pid)
            if stopped:
                LOGGER.debug("Stopped OperationExecutor process for %d" % operation_id)
            else:
                LOGGER.debug("Operation %d was probably killed from it's specific thread." % operation_id)

        ## Persist the cancelled status.
        operation.mark_cancelled()
        dao.store_entity(operation)
        return stopped
开发者ID:HuifangWang,项目名称:the-virtual-brain-website,代码行数:32,代码来源:backend_client.py


示例3: _run_cluster_job

    def _run_cluster_job(operation_identifier, user_name_label, adapter_instance):
        """
        Threaded Popen
        It is the function called by the ClusterSchedulerClient in a Thread.
        This function starts a new process.

        :param operation_identifier: id of the operation to schedule on the cluster
        :param user_name_label: user label injected into the schedule command
        :param adapter_instance: adapter used to estimate execution time (walltime)
        """
        # Load operation so we can estimate the execution time
        operation = dao.get_operation_by_id(operation_identifier)
        kwargs = parse_json_parameters(operation.parameters)
        time_estimate = int(adapter_instance.get_execution_time_approximation(**kwargs))
        hours = int(time_estimate / 3600)
        # Floor division keeps minutes an int; plain / yields a float on Python 3,
        # which datetime.time() rejects.
        minutes = (int(time_estimate) % 3600) // 60
        seconds = int(time_estimate) % 60
        # Anything lower than 2 hours just use default walltime
        if hours < 2:
            walltime = "02:00:00"
        elif hours > 23:
            # Walltime is capped below one day.
            walltime = "23:59:59"
        else:
            walltime = datetime.time(hours, minutes, seconds)
            walltime = walltime.strftime("%H:%M:%S")

        call_arg = config.CLUSTER_SCHEDULE_COMMAND % (walltime, operation_identifier, user_name_label)
        LOGGER.info(call_arg)
        process_ = Popen([call_arg], stdout=PIPE, shell=True)
        job_id = process_.stdout.read().replace('\n', '').split('OAR_JOB_ID=')[-1]
        # Fix: the two format arguments were previously swapped
        # (the operation id was logged as the job identifier and vice versa).
        LOGGER.debug("Got jobIdentifier = %s for CLUSTER operationID = %s" % (job_id, operation_identifier))
        operation_identifier = model.OperationProcessIdentifier(operation_identifier, job_id=job_id)
        dao.store_entity(operation_identifier)
开发者ID:HuifangWang,项目名称:the-virtual-brain-website,代码行数:29,代码来源:backend_client.py


示例4: update

def update():
    """
    Move images previously stored in TVB operation folders, in a single folder/project.
    """
    total_projects = dao.get_all_projects(is_count=True)

    for offset in range(0, total_projects, PAGE_SIZE):

        current_page = dao.get_all_projects(page_start=offset,
                                            page_end=min(offset + PAGE_SIZE, total_projects))

        for current_project in current_page:
            project_figures = _figures_in_project(current_project.id)

            # Prefix each figure file with its operation id, matching the new single-folder layout.
            for fig in project_figures:
                fig.file_path = "%s-%s" % (fig.operation.id, fig.file_path)

            dao.store_entities(project_figures)

            # Run all pending project-level updates, then bump the stored project version.
            folder = FilesHelper().get_project_folder(current_project)
            manager = ProjectUpdateManager(folder)
            manager.run_all_updates()

            current_project.version = TvbProfile.current.version.PROJECT_VERSION
            dao.store_entity(current_project)
开发者ID:unimauro,项目名称:tvb-framework,代码行数:25,代码来源:5454_update_code.py


示例5: test_launch_operation_HDD_full_space_started_ops

 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Initiating an operation must fail with NoMemoryAvailableException when
     the disk quota is already consumed by operations in STARTED state.
     """
     space_taken_by_started = 100
     algo_group = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
     running_op = model.Operation(self.test_user.id, self.test_project.id, algo_group.id, "",
                                  status=model.STATUS_STARTED,
                                  estimated_disk_size=space_taken_by_started)
     dao.store_entity(running_op)

     adapter = FlowService().build_adapter_instance(algo_group)
     data = {"test": 100}
     # Leave one byte less than the adapter needs, so the launch must be rejected.
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)
                                               + space_taken_by_started - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

     self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation,
                       self.test_user, self.test_project.id, adapter, tmp_folder, **data)
     self._assert_no_dt2()
开发者ID:lcosters,项目名称:tvb-framework,代码行数:31,代码来源:operation_service_test.py


示例6: remove_datatype

 def remove_datatype(self, skip_validation=False):
     """
     Called when a TimeSeries is removed.

     Clears the back-reference on every DatatypeMeasure computed from this
     TimeSeries, then (unless `skip_validation` is True) refuses the removal
     while other datatypes still reference it.

     :param skip_validation: when True, dependent datatypes are not checked
     :raises RemoveDataTypeException: when a dependent datatype still exists
     """
     associated_cv = dao.get_generic_entity(Covariance, self.handled_datatype.gid, '_source')
     associated_pca = dao.get_generic_entity(PrincipalComponents, self.handled_datatype.gid, '_source')
     associated_is = dao.get_generic_entity(IndependentComponents, self.handled_datatype.gid, '_source')
     associated_cc = dao.get_generic_entity(CrossCorrelation, self.handled_datatype.gid, '_source')
     associated_fr = dao.get_generic_entity(FourierSpectrum, self.handled_datatype.gid, '_source')
     associated_wv = dao.get_generic_entity(WaveletCoefficients, self.handled_datatype.gid, '_source')
     associated_cs = dao.get_generic_entity(CoherenceSpectrum, self.handled_datatype.gid, '_source')
     associated_dm = dao.get_generic_entity(DatatypeMeasure, self.handled_datatype.gid, '_analyzed_datatype')
     for datatype_measure in associated_dm:
         # Fix: the attribute was previously misspelled '_analyed_datatype', which
         # silently created a NEW attribute instead of clearing the '_analyzed_datatype'
         # link the dao query above filters on.
         datatype_measure._analyzed_datatype = None
         dao.store_entity(datatype_measure)
     msg = "TimeSeries cannot be removed as it is used by at least one "
     if not skip_validation:
         if len(associated_cv) > 0:
             raise RemoveDataTypeException(msg + " Covariance.")
         if len(associated_pca) > 0:
             raise RemoveDataTypeException(msg + " PrincipalComponents.")
         if len(associated_is) > 0:
             raise RemoveDataTypeException(msg + " IndependentComponents.")
         if len(associated_cc) > 0:
             raise RemoveDataTypeException(msg + " CrossCorrelation.")
         if len(associated_fr) > 0:
             raise RemoveDataTypeException(msg + " FourierSpectrum.")
         if len(associated_wv) > 0:
             raise RemoveDataTypeException(msg + " WaveletCoefficients.")
         if len(associated_cs) > 0:
             raise RemoveDataTypeException(msg + " CoherenceSpectrum.")
     ABCRemover.remove_datatype(self, skip_validation)
开发者ID:HuifangWang,项目名称:the-virtual-brain-website,代码行数:32,代码来源:remover_timeseries.py


示例7: test_remove_project_node

    def test_remove_project_node(self):
        """
        Test removing of a node from a project.

        Scenario: a datatype created in one project is linked into a second
        project; removing it from the first project must keep it alive (the
        link still references it), and removing it from the linked project
        must finally delete it, leaving only operation.xml in each folder.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        # Link the same datatype into the second project.
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        assert dao.get_datatype_by_gid(gid) is not None, "Initialization problem!"
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        assert os.path.exists(op_folder)
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        ### Validate that no more files are created than needed.

        # The removal code expects the system user to exist; create it when missing.
        if(dao.get_system_user() is None):
            dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
        ### operation.xml file should still be there
        
        # Datatype must survive the first removal: the link still points at it.
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        assert dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links"
        # Removing from the linked project as well finally deletes the datatype.
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        assert dao.get_datatype_by_gid(gid) is None
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
开发者ID:maedoc,项目名称:tvb-framework,代码行数:33,代码来源:project_service_test.py


示例8: generate_users

 def generate_users(nr_users, nr_projects):
     """
     Populate a clean-state DB with generated users and projects.

     :param nr_users: number of users to be generated (with random roles between
                             CLINICIAN and RESEARCHER and random validated state)
     :param nr_projects: maximum number of projects to be generated for each user
     """
     generated_users = []

     for idx in range(nr_users):
         # Coin flip decides the role of each generated user.
         role = 'CLINICIAN' if random.randint(0, 1) == 1 else 'RESEARCHER'
         hashed_pass = md5("test").hexdigest()
         dao.store_entity(model.User("gen" + str(idx), hashed_pass, "[email protected]", True, role))
         stored_user = dao.get_user_by_name("gen" + str(idx))
         ExtremeTestFactory.VALIDATION_DICT[stored_user.id] = 0
         generated_users.append(stored_user)

     for idx in range(nr_users):
         owner = dao.get_user_by_name("gen" + str(idx))
         projects_for_user = random.randint(0, nr_projects)
         for proj_idx in range(projects_for_user):
             data = dict(name='GeneratedProject' + str(idx) + '_' + str(proj_idx),
                         description='test_desc',
                         users=ExtremeTestFactory.get_users_ids(random.randint(0, nr_users - 3),
                                                                nr_users, owner.id, generated_users))
             ProjectService().store_project(owner, True, None, **data)
             ExtremeTestFactory.VALIDATION_DICT[owner.id] += 1
开发者ID:maedoc,项目名称:tvb-framework,代码行数:29,代码来源:factory.py


示例9: submit

    def submit(self, dynamic_gid, dynamic_name):
        """
        Persist the cached dynamic under `dynamic_name`, refusing duplicates.

        :return: dict with 'saved' flag (and 'msg' on failure)
        """
        if dao.get_dynamic_by_name(dynamic_name):
            return {'saved': False, 'msg': 'There is another configuration with the same name'}

        cached_dynamic = self.get_cached_dynamic(dynamic_gid)
        model_instance = cached_dynamic.model
        integrator = cached_dynamic.integrator

        # Snapshot the first value of every UI-configurable model parameter.
        model_parameters = [(name, getattr(model_instance, name)[0])
                            for name in model_instance.ui_configurable_parameters]

        entity = tvb.core.entities.model.Dynamic(
            dynamic_name,
            common.get_logged_user().id,
            model_instance.__class__.__name__,
            json.dumps(model_parameters),
            integrator.__class__.__name__,
            None
            # todo: serialize integrator parameters
            # json.dumps(integrator.raw_ui_integrator_parameters)
        )

        dao.store_entity(entity)
        return {'saved': True}
开发者ID:amitsaroj001,项目名称:tvb-framework,代码行数:27,代码来源:dynamic_model_controller.py


示例10: __init__

    def __init__(self):
        """
        Build the persisted entities (user, project, algorithm, operation)
        that later datatype-creation helpers rely on.
        """
        # Time-based suffix so repeated factory runs get unique entity names.
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "[email protected]" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm (reuse the stored one when the simulator is already registered)
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm(SIMULATOR_MODULE, SIMULATOR_CLASS, alg_category.id)
        self.algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if self.algorithm is None:
            self.algorithm = dao.store_entity(ad)

        # Create an operation, already marked FINISHED, under the new project/algorithm
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
开发者ID:maedoc,项目名称:tvb-framework,代码行数:30,代码来源:datatypes_factory.py


示例11: upgrade

def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.

    Adds the two range columns to DATA_TYPES_GROUPS and back-fills
    `no_of_ranges` for groups stored by previous code versions.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            # Derive the number of ranges from the highest range attribute that is set.
            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception, excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
开发者ID:amitsaroj001,项目名称:tvb-framework,代码行数:35,代码来源:004_update_db.py


示例12: reset_password

    def reset_password(self, **data):
        """
        Service Layer for resetting a password.

        Expects KEY_EMAIL (and KEY_USERNAME as a disambiguation hint) in
        `data`; generates a random password, emails it to the user and
        returns a display text. On failure the previous password is restored.

        :raises UsernameException: when the email is missing/unmatched or the reset fails
        """
        if (KEY_EMAIL not in data) or len(data[KEY_EMAIL]) < 1:
            raise UsernameException("Empty Email!")

        old_pass, user = None, None
        try:
            email = data[KEY_EMAIL]
            name_hint = data[KEY_USERNAME]
            user = dao.get_user_by_email(email, name_hint)
            if user is None:
                raise UsernameException("No singular user could be found for the given data!")

            old_pass = user.password
            # Random password of DEFAULT_PASS_LENGTH chars drawn from ASCII 48..122.
            new_pass = ''.join(chr(randint(48, 122)) for _ in range(DEFAULT_PASS_LENGTH))
            user.password = md5(new_pass).hexdigest()
            self.edit_user(user, old_pass)
            self.logger.info("Resetting password for email : " + email)
            email_sender.send(FROM_ADDRESS, email, SUBJECT_RECOVERY, TEXT_RECOVERY % (user.username, new_pass))
            return TEXT_DISPLAY
        except Exception, excep:
            # Roll back to the previous password when anything failed mid-way.
            if old_pass and len(old_pass) > 1 and user:
                user.password = old_pass
                dao.store_entity(user)
            self.logger.exception("Could not change user password!")
            raise UsernameException(excep.message)
开发者ID:sdiazpier,项目名称:tvb-framework,代码行数:28,代码来源:user_service.py


示例13: save_project_to_user

 def save_project_to_user(user_id, project_id):
     """
     Remember `project_id` as the most recently selected project of this user.
     """
     current_user = dao.get_user_by_id(user_id)
     current_user.selected_project = project_id
     dao.store_entity(current_user)
开发者ID:sdiazpier,项目名称:tvb-framework,代码行数:7,代码来源:user_service.py


示例14: create_operation

    def create_operation(algorithm=None, test_user=None, test_project=None, 
                         operation_status=model.STATUS_FINISHED, parameters="test params"):
        """
        Create and persist an Operation entity, generating any missing
        dependency (algorithm, user, project) with test defaults.
        
        :param algorithm: When not None, introspect TVB and TVB_TEST for adapters.
        :return: Operation entity after persistence. 
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                                    'NDimensionArrayAdapter')
        test_user = test_user if test_user is not None else TestFactory.create_user()
        test_project = test_project if test_project is not None else TestFactory.create_project(test_user)

        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(test_user.id, test_project.id, algorithm.id, parameters,
                                    meta=json.dumps(meta), status=operation_status)
        dao.store_entity(operation)
        # Re-load, so that lazy attributes are correctly populated.
        return dao.get_operation_by_id(operation.id)
开发者ID:maedoc,项目名称:tvb-framework,代码行数:25,代码来源:factory.py


示例15: clean_database

    def clean_database(self, delete_folders=True):
        """
        Deletes data from all tables

        :param delete_folders: when True, also remove project folders from
            disk after the DB content is gone.
        """
        self.cancel_all_operations()
        LOGGER.warning("Your Database content will be deleted.")
        try:
            session = SessionMaker()
            # Delete in reverse dependency order, so FK constraints are respected.
            for table in reversed(model.Base.metadata.sorted_tables):
                # We don't delete data from some tables, because those are 
                # imported only during introspection which is done one time
                if table.name not in self.EXCLUDE_TABLES:
                    try:
                        session.open_session()
                        con = session.connection()
                        LOGGER.debug("Executing Delete From Table " + table.name)
                        con.execute(table.delete())
                        session.commit()
                    except Exception as e:
                        # We cache exception here, in case some table does not exists and
                        # to allow the others to be deleted
                        LOGGER.warning(e)
                        session.rollback()
                    finally:
                        session.close_session()
            LOGGER.info("Database was cleanup!")
        except Exception as excep:
            LOGGER.warning(excep)
            raise

        # Now if the database is clean we can delete also project folders on disk
        if delete_folders:
            self.delete_project_folders()
        # Re-create the system user, which other code paths expect to exist.
        dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
开发者ID:LauHoiYanGladys,项目名称:tvb-framework,代码行数:34,代码来源:base_testcase.py


示例16: remove_datatype

    def remove_datatype(self, skip_validation=False):
        """
        Called when a Connectivity is to be removed.

        Unless `skip_validation` is True, the removal is refused while other
        datatypes still reference this Connectivity. Child Connectivities are
        re-parented before delegating to ABCRemover.
        """
        if not skip_validation:
            gid = self.handled_datatype.gid
            msg = "Connectivity cannot be removed as it is used by at least one "

            # Fetch every dependent category first, then raise for the first non-empty one.
            dependents = []
            for dependent_class, label in ((TimeSeriesRegion, " TimeSeriesRegion."),
                                           (RegionMapping, " RegionMapping."),
                                           (StimuliRegion, " StimuliRegion."),
                                           (ConnectivityMeasure, " ConnectivityMeasure.")):
                dependents.append((label, dao.get_generic_entity(dependent_class, gid, '_connectivity')))

            for label, entities in dependents:
                if len(entities) > 0:
                    raise RemoveDataTypeException(msg + label)

        #### Update child Connectivities, if any.
        children = dao.get_generic_entity(Connectivity, self.handled_datatype.gid, '_parent_connectivity')

        if len(children) > 0:
            # All siblings after the first now point at the first child...
            for sibling in children[1:]:
                sibling.parent_connectivity = children[0].gid
            # ...and the first child inherits the removed datatype's own parent.
            if children and children[0]:
                children[0].parent_connectivity = self.handled_datatype.parent_connectivity
            for child in children:
                dao.store_entity(child)
        ABCRemover.remove_datatype(self, skip_validation)


示例17: add_operation_additional_info

 def add_operation_additional_info(self, message):
     """
     Attach `message` to the current operation, for display in the UI
     (typically a warning).
     """
     operation = dao.get_operation_by_id(self.operation_id)
     operation.additional_info = message
     dao.store_entity(operation)
开发者ID:unimauro,项目名称:tvb-framework,代码行数:7,代码来源:abcuploader.py


示例18: _run_cluster_job

    def _run_cluster_job(operation_identifier, user_name_label, adapter_instance):
        """
        Threaded Popen
        It is the function called by the ClusterSchedulerClient in a Thread.
        This function starts a new process.

        :param operation_identifier: id of the operation to schedule on the cluster
        :param user_name_label: user label injected into the schedule command
        :param adapter_instance: adapter used to estimate execution time (walltime)
        """
        # Load operation so we can estimate the execution time
        operation = dao.get_operation_by_id(operation_identifier)
        kwargs = parse_json_parameters(operation.parameters)
        kwargs = adapter_instance.prepare_ui_inputs(kwargs)
        time_estimate = int(adapter_instance.get_execution_time_approximation(**kwargs))
        hours = int(time_estimate / 3600)
        # Floor division keeps minutes an int; plain / yields a float on Python 3,
        # which would render as e.g. "30.0" inside the walltime string.
        minutes = (int(time_estimate) % 3600) // 60
        seconds = int(time_estimate) % 60
        # Anything lower than 5 hours just use default walltime
        if hours < 5:
            walltime = "05:00:00"
        else:
            # Zero-pad single-digit hours for the HH:MM:SS format.
            if hours < 10:
                hours = "0%d" % hours
            else:
                hours = str(hours)
            walltime = "%s:%s:%s" % (hours, str(minutes), str(seconds))

        call_arg = TvbProfile.current.cluster.SCHEDULE_COMMAND % (operation_identifier, user_name_label, walltime)
        LOGGER.info(call_arg)
        process_ = Popen([call_arg], stdout=PIPE, shell=True)
        job_id = process_.stdout.read().replace('\n', '').split(TvbProfile.current.cluster.JOB_ID_STRING)[-1]
        # Fix: the two format arguments were previously swapped
        # (the operation id was logged as the job identifier and vice versa).
        LOGGER.debug("Got jobIdentifier = %s for CLUSTER operationID = %s" % (job_id, operation_identifier))
        operation_identifier = model.OperationProcessIdentifier(operation_identifier, job_id=job_id)
        dao.store_entity(operation_identifier)
开发者ID:LauHoiYanGladys,项目名称:tvb-framework,代码行数:31,代码来源:backend_client.py


示例19: create_link

 def create_link(data_ids, project_id):
     """
     Persist a Links entity binding each given dataType id to `project_id`.
     """
     for datatype_id in data_ids:
         dao.store_entity(model.Links(datatype_id, project_id))
开发者ID:sdiazpier,项目名称:tvb-framework,代码行数:7,代码来源:flow_service.py


示例20: update_dt

def update_dt(dt_id, new_create_date):
    """
    Overwrite a DataType's creation date, in both the DB and its H5 metadata.
    """
    datatype = dao.get_datatype_by_id(dt_id)
    datatype.create_date = new_create_date
    dao.store_entity(datatype)
    # Re-load by GID to get the H5-backed instance, then persist metadata there too.
    datatype = dao.get_datatype_by_gid(datatype.gid)
    datatype.persist_full_metadata()



注:本文中的tvb.core.entities.storage.dao.store_entity函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python flow_service.FlowService类代码示例发布时间:2022-05-27
下一篇:
Python dao.remove_entity函数代码示例发布时间:2022-05-27
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap