Python util.load Function Code Examples

This article collects typical usage examples of the util.load function in Python. If you have been wondering what exactly the Python load function does, how to call it, or what it looks like in real code, the curated examples below may help.



The following presents 20 code examples of the load function, sorted by popularity by default.
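Note that util here is not a standard-library module: each of the projects below ships its own util helper, so the exact signature and behavior of load varies from example to example (some versions unpickle .pkl files, others read plain text, JSON, or CSV). As a rough orientation only, a minimal pickle-based load/save pair, consistent with the way Examples 11 and 12 call util.load and util.save but not taken from any of these projects, might look like the following sketch:

import pickle

def save(data, path):
    # One common convention for util.save: serialize data to path with pickle.
    # (Hypothetical sketch; the real helpers in the projects below may differ.)
    with open(path, "wb") as f:
        pickle.dump(data, f)

def load(path):
    # Load and return whatever object was pickled at path.
    with open(path, "rb") as f:
        return pickle.load(f)

With such helpers, save([0, 1, 2, 3, 4], "test.pkl") followed by load("test.pkl") round-trips the list, which is exactly the pattern exercised in Example 12 (saveTest) below.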

Example 1: codeFile

def codeFile(args,flag,data): 
  PARAM_KEY = 1;
  PARAM_FILE = 2; # Output file location
  PARAM_FORMATTER = 3
  ARGUMENTS = len(args)-1
  # Ability to add a block of code through copy and paste and have it formatted correctly!
  if( keyExists("files",args[PARAM_KEY])):
    _file = json.loads(load("files/"+args[PARAM_KEY]));
    out = ''

    # loadJSON 
    for x in _file:
      block = str(load("blocks/"+ x))
      if(ARGUMENTS == PARAM_FORMATTER): # Alter all the blocks in said fashion
        block = format.block(block, args[PARAM_FORMATTER])     
      out += block
      out += "\n" # Adds some spacing between blocks

    # No file specified
    if(len(args) < 3 ): 
      log(out)
    else:
      log("Saving to file "+ args[PARAM_FILE] )
      save(args[PARAM_FILE],out)
  else:
    error("Error: File does not exist")
Developer: jelec | Project: codeSynergy | Lines: 26 | Source file: gen.py


Example 2: codeProject

def codeProject(args,flag,data):
  PARAM_KEY = 1
  PARAM_PATH = 2
  PARAM_FORMATTER = 3
  ARGUMENTS = len(args)-1

  # JSON mapping files and storage of this
  if( keyExists("projects",args[1])):
    if( "stdout" in args[2]):
      project = json.loads(load("projects/"+args[PARAM_KEY])); # Uses key value storage
      directory = args[PARAM_PATH] + "/" + args[PARAM_KEY]
      
      mkdir(directory)
      for x in project.keys(): # Reflect that with here
        _file = json.loads(load("files/"+x));
        out = '';
        for y in _file:
          block = str(load("blocks/"+ y))
          if(ARGUMENTS == PARAM_FORMATTER): # Alter all the blocks in said fashion
            block = format.block(block, args[PARAM_FORMATTER])     
          out += block
        # Output the file with the correct file name
        save(directory + "/" + project[x],out)

  else:
    error("Error: Project does not exist")
Developer: jelec | Project: codeSynergy | Lines: 26 | Source file: gen.py


Example 3: _buildmeta

def _buildmeta(ui, repo, args, partial=False, skipuuid=False):

    if repo is None:
        raise error.RepoError("There is no Mercurial repository"
                              " here (.hg not found)")

    dest = None
    validateuuid = False
    if len(args) == 1:
        dest = args[0]
        validateuuid = True
    elif len(args) > 1:
        raise hgutil.Abort('rebuildmeta takes 1 or no arguments')
    url = repo.ui.expandpath(dest or repo.ui.config('paths', 'default-push') or
                             repo.ui.config('paths', 'default') or '')

    meta = svnmeta.SVNMeta(repo, skiperrorcheck=True)

    svn = None
    if meta.subdir is None:
        svn = svnrepo.svnremoterepo(ui, url).svn
        meta.subdir = svn.subdir

    youngest = 0
    startrev = 0
    sofar = []
    branchinfo = {}
    if partial:
        try:
            # we can't use meta.lastpulled here because we are bootstrapping the
            # lastpulled and want to keep the cached value on disk during a
            # partial rebuild
            foundpartialinfo = False
            youngestpath = os.path.join(meta.metapath, 'lastpulled')
            if os.path.exists(youngestpath):
                youngest = util.load(youngestpath)
                sofar = list(maps.RevMap.readmapfile(meta.revmap_file))
                if sofar and len(sofar[-1].split(' ', 2)) > 1:
                    lasthash = sofar[-1].split(' ', 2)[1]
                    startrev = repo[lasthash].rev() + 1
                    branchinfo = util.load(meta.branch_info_file)
                    foundpartialinfo = True
            if not foundpartialinfo:
                ui.status('missing some metadata -- doing a full rebuild\n')
                partial = False
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
            ui.status('missing some metadata -- doing a full rebuild\n')
        except AttributeError:
            ui.status('no metadata available -- doing a full rebuild\n')
Developer: fuzzball81 | Project: dotfiles | Lines: 51 | Source file: svncommands.py


Example 4: MDSPlotTest

def MDSPlotTest():
    import json
    import experiment
    resPath = "../experiments/ebook_color_pca_3"
    experiment.experimentCase("../params/ebook_color_pca_28x28_3.json",resPath)
    info = json.loads(util.fileString("../params/ebook_color_pca_28x28_3.json"))
    info = util.dotdict(info)
    x = util.load(resPath+"/x.pkl")
    print x.dtype
    compressed = util.load(resPath+"/compressed.pkl")
    MDSPlots(x,compressed,info.dataSet.shape)
    import matplotlib.pyplot as plt
    fig = plt.gcf()  # grab the current figure produced by MDSPlots
    fig.savefig(resPath + "/mds_plot.png")  # output path is illustrative
    print("show figure")
    plt.show()
Developer: proboscis | Project: GradProject | Lines: 15 | Source file: visualize.py


Example 5: main

def main():
    # establish postgresql connection
    con = psycopg2.connect(database='mimic', user='mimic',
                           host='localhost', password='mimic')

    # 1. extract and export list of adults
    adults = extract_adults(con)
    adults.to_csv("lists/adults_admitted.csv", index=False,
                  columns=['subject_id', 'combined_dod', 'outtime', 'age'])
    # ----

    # 2. extract patients with cardiovascular conditions
    adults = load("lists/adults_admitted.csv")
    adults_list = set(adults['subject_id'].tolist())

    heart_patients = generate_heart_patients(con, adults_list)
    heart_patients.to_csv("lists/heart_patients.csv", index=False, header=True)

    adults_heart = pd.merge(adults, heart_patients,
                            on=['subject_id'], how='inner')

    # 3. then trim down the list to patients who were discharged alive
    discharged = lived(adults_heart)
    discharged.to_csv("lists/adults_heart_discharged.csv", index=False)

    # discharged = load("lists/adults_heart_discharged.csv")

    # 4. from those discharged patients, find the ones with a second
    # admission and calculate the date difference between first and second
    readmitted = readmission_diff(con, discharged['subject_id'].tolist())
    readmitted.to_csv("lists/readmission_diff.csv", index=False)

    # 5. generate icu id's of first visits (could be used for awk)
    generate_icu_id(con, "lists/readmission_diff.csv",
                    "lists/first_icu_list.txt")
Developer: yjlinotlazy | Project: Insight | Lines: 35 | Source file: generate_lists.py


Example 6: sgd_optimize

def sgd_optimize(learning_rate=0.1,
                 pretrain_learning_rate=0.001,
                 pretrain_epochs=15,
                 finetune_epochs=1000,
                 batch_size=20):
    # Load datasets
    train, valid, test = util.load()
    print "loading 0 - ", train[0].shape[0], " train inputs in gpu memory"
    train_x, train_y = util.create_theano_shared(train)
        
    print "loading 0 - ", valid[0].shape[0], " validation inputs in gpu memory"
    valid_x, valid_y = util.create_theano_shared(valid)

    print "loading 0 - ", test[0].shape[0], " test inputs in gpu memory"
    test_x, test_y = util.create_theano_shared(test)

    n_train_batches = train[0].shape[0] / batch_size
    n_valid_batches = valid[0].shape[0] / batch_size
    n_test_batches = test[0].shape[0] / batch_size

    random_generator = numpy.random.RandomState(1)
    print "...Building model"
    sd = StackedDenoisingAutoEncoders(random_generator,
                                      hidden_layer_sizes=[1000, 1000, 1000])

    
    print "...Getting pretrain functions"
    pretrain_fns = sd.pretrain(train_x, batch_size)

    #############
    # Pretrain
    ############
    print "... Pre-training model"
    start_time = time.clock()
    ## Pre-train layer-wise
    corruption_levels = [.1, .2, .3]
    for i in range(sd.n_layers):
        for epoch in range(pretrain_epochs):
            c = []
            for batch_index in xrange(n_train_batches):
                c.append(pretrain_fns[i](index=batch_index,
                                         corruption_level=corruption_levels[i],
                                         learning_rate=pretrain_learning_rate))
            print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch),
            print numpy.mean(c)

    end_time = time.clock()
    print "Pretraining code ran for %.2fm" % (end_time - start_time) 

    #############
    # Finetune
    ############

    print "...Fine-tuning model"
    train_model, valid_model, test_model = sd.finetune(train_x, train_y,
                                                       valid_x, valid_y,
                                                       test_x, test_y,
                                                       batch_size, learning_rate)
    util.train_test_model(finetune_epochs, train_model, valid_model, test_model,
                          n_train_batches, n_valid_batches, n_test_batches)
Developer: sjuvekar | Project: Theano | Lines: 60 | Source file: denoising_autoencoders.py


Example 7: generate_features

def generate_features(provided_list=None):
    con = psycopg2.connect(database='mimic', user='mimic', host='localhost',
                           password='mimic')
    # load list of first hadm ids
    first_icu = load("lists/adults_heart_discharged.csv",
                     cols=['subject_id', 'hadm_id', 'icustay_id']).\
        sort_values(by=['subject_id'])

    # if a patient list is provided, then only look at those patients
    if provided_list is not None:
        first_icu = first_icu[first_icu['subject_id'].isin(provided_list)]

    hids = set(first_icu['hadm_id'].tolist())
    icu_list = first_icu['icustay_id'].tolist()
    p_list = first_icu['subject_id'].tolist()
    patients = first_icu['subject_id']

    # get comorbidity scores
    comorb = comorb_scores(con, hids)

    # get oasis scores etc
    phys_scores = generate_phys_scores(con, icu_list, p_list)
    combined_scores = np.concatenate((np.asmatrix(comorb),
                                      phys_scores), axis=1)
    np.savetxt("X.csv", combined_scores, delimiter=",")
    return patients, combined_scores
Developer: yjlinotlazy | Project: Insight | Lines: 26 | Source file: features.py


Example 8: test_tour

def test_tour(par=1):
  ip.reset(par)
  scene = 'disrupt-11'
  scan = dset.Scan('../data/%s' % scene)
  texel_colors = ut.load(ut.pjoin(figures.make_path('noloo', 'interior-wide', scene), 'data.pk'))['ret'][0]
  mesh = box.load_from_mat(ut.pjoin(scan.path, 'cube.mat'))
  tour(scan, mesh, texel_colors, [0, 1, 2], plane_idx = 0, outline_start = 0, par = par)
Developer: abhishah | Project: camo | Lines: 7 | Source file: tour.py


Example 9: retrieve_notes_sql

def retrieve_notes_sql():
    con = psycopg2.connect(database='mimic', user='mimic', host='localhost',
                           password='mimic')

    # load list of first icus
    icu_list = load("notes/adults_heart_discharged.csv",
                    cols=['subject_id', 'hadm_id'])

    adm_id = icu_list['hadm_id'].tolist()

    # then find the first icu stay not in above list
    query_list = "("+str(adm_id)[1:-1]+")"

    # get all relevant note during first stay
    sql_query = """
        SELECT SUBJECT_ID,hadm_id, chartdate, charttime,text
        FROM mimiciii.noteevents
        WHERE (hadm_id IN %s AND category != 'Discharge summary')
        ORDER BY SUBJECT_ID, chartdate, charttime
        ;
        """ % query_list
    notes = pd.read_sql_query(sql_query, con)
    notes.to_csv("notes/notes.csv", index=False)
    # print notes.shape
    return True
Developer: yjlinotlazy | Project: Insight | Lines: 25 | Source file: notes_nlp.py


Example 10: get_patient_scores

def get_patient_scores(pid):
    # initialize postgres connection
    con = psycopg2.connect(database='mimic',
                           user='mimic', host='localhost',
                           password='mimic')
    stay_ids = load("lists/adults_heart_discharged.csv",
                    cols=['icustay_id'])['icustay_id'].tolist()

    # retrieve from database all relevant scores
    sql_query = """
    SELECT * FROM mimiciii.oasis
    WHERE SUBJECT_ID = %d
    ;
    """ % (pid)
    oasis = pd.read_sql_query(sql_query, con)
    oasis = oasis[oasis['icustay_id'].isin(stay_ids)]

    sql_query = """
    SELECT * FROM mimiciii.sofa
    WHERE SUBJECT_ID = %d
    ;
    """ % (pid)
    sofa = pd.read_sql_query(sql_query, con)
    sofa = sofa[sofa['icustay_id'].isin(stay_ids)]

    sql_query = """
    SELECT * FROM mimiciii.sapsii
    WHERE SUBJECT_ID = %d
    ;
    """ % (pid)
    sapsii = pd.read_sql_query(sql_query, con)
    sapsii = sapsii[sapsii['icustay_id'].isin(stay_ids)]

    sql_query = """
    SELECT * FROM mimiciii.sapsii_last
    WHERE SUBJECT_ID = %d
    ;
    """ % (pid)
    sapsii_last = pd.read_sql_query(sql_query, con)
    sapsii_last = sapsii_last[sapsii_last['icustay_id'].isin(stay_ids)]

    # combine all scores
    data = [oasis.iloc[0][2], sofa.iloc[0][2],
            sapsii.iloc[0][2], sapsii_last.iloc[0][2]]

    # TODO: make a plot and display in html
    '''
    #barplot=ax.bar([0,1,2,3],data,0.6,color=['grey','white','grey','white'])
    #names = ax.set_xticklabels(['severity illness score',
    #                            'organ failure assessment',
    #                            'acute physiology score',
    #                            'acute physiology score(last)'])
    #ax.set_xticks([0,1,2,3])
    #ax.set_xlim(-0.3,3.8)
    #plt.gcf().subplots_adjust(bottom=0.25)
    #plt.setp(names,rotation=30,fontsize=13)
    #savefig("predict/fig.png")
    '''
    return str(data)[1:-1]
Developer: yjlinotlazy | Project: Insight | Lines: 59 | Source file: getinfo.py


Example 11: saveModelImages

def saveModelImages(modelPath,dstPath,color = False):
    info,sda = util.load(modelPath)
    import train
    x = train.createDataSet(info["dataSet"]).get_value(borrow=True)
    for name,img in createSdaImages(sda,x,color):
        dst = dstPath + "/" + name
        util.ensurePathExists(dst)
        img.save(dst)
Developer: proboscis | Project: GradProject | Lines: 8 | Source file: visualize.py


Example 12: saveTest

def saveTest():
    data = [0,1,2,3,4]
    name = "test.pkl"
    util.save(data,name)
    data2 = util.load(name)

    print data, data2
    return data == data2
Developer: proboscis | Project: GradProject | Lines: 8 | Source file: tests.py


Example 13: read_test_data

def read_test_data(file):

    # assume if one is saved they all are
    if util.check_file_exists(CONST.DATASET_PATH + CONST.TEST_PATH):
        T_Data = util.load(CONST.DATASET_PATH + CONST.TEST_PATH)
        T_Labels = util.load(CONST.DATASET_PATH + CONST.TEST_PATH_LABELS)
        T_Queries = util.load(CONST.DATASET_PATH + CONST.TEST_PATH_Q)
        T_Docs = util.load(CONST.DATASET_PATH + CONST.TEST_PATH_DOCS)

    else:
        T_Data, T_Labels, T_Queries, T_Docs = read_train_data(file)

        util.save_pickle(CONST.DATASET_PATH + CONST.TEST_PATH, T_Data)
        util.save_pickle(CONST.DATASET_PATH + CONST.TEST_PATH_LABELS, T_Labels)
        util.save_pickle(CONST.DATASET_PATH + CONST.TEST_PATH_Q, T_Queries)
        util.save_pickle(CONST.DATASET_PATH + CONST.TEST_PATH_DOCS, T_Docs)

    return T_Data, T_Labels, T_Queries, T_Docs
Developer: hurshprasad | Project: active-learning-elo-letor | Lines: 18 | Source file: pre_processing.py


Example 14: get_resource

def get_resource(resource_name):
	global RESOURCE_COUNTER  # reassigned below, so it must be declared global
	if RESOURCE_TO_ID.has_key(resource_name):
		return DATA_MAP[RESOURCE_TO_ID[resource_name]]
	resource_data = load(resource_name)
	resource_id = RESOURCE_COUNTER
	RESOURCE_COUNTER += 1
	RESOURCE_TO_ID[resource_name] = resource_id
	DATA_MAP[resource_id] = resource_data
	return resource_data
Developer: luciferous | Project: CrazyAnt | Lines: 9 | Source file: resource_manager.py


Example 15: predict

def predict(data_json, model_path):
    preproc = util.load(os.path.dirname(model_path))
    dataset = load.load_dataset(data_json)
    x, y = preproc.process(*dataset)

    model = keras.models.load_model(model_path)
    probs = model.predict(x, verbose=1)

    return probs
Developer: yangbinfeng | Project: ecg | Lines: 9 | Source file: predict.py


Example 16: createFile

def createFile(_file, formating, formatFlag):
  out = "" # Output string
  for x in _file:
      block = str(load("blocks/"+ x))
      if(formatFlag): # Alter all the blocks in said fashion
        block = format.block(block, formating)     
      out += block
      out += "\n" # Adds some spacing between blocks

  return out
Developer: jelec | Project: codeSynergy | Lines: 10 | Source file: gen.py


Example 17: get_phys_scores

def get_phys_scores(con, icu_list, p_list):
    oasis = get_sql(con, "subject_id,icustay_id, oasis", "mimiciii.oasis")
    oasis = oasis[oasis['icustay_id'].isin(icu_list)].sort_values(
        by=['subject_id'])

    oasis_last = get_sql(con,
                         "subject_id,icustay_id, oasis",
                         "mimiciii.oasis_last")
    oasis_last = oasis_last[oasis_last['icustay_id'].
                            isin(icu_list)].sort_values(by=['subject_id'])

    saps = get_sql(con, 'subject_id,icustay_id, saps', 'mimiciii.saps')
    saps = saps[saps['icustay_id'].isin(icu_list)].sort_values(
        by=['subject_id'])

    sapsii = get_sql(con, 'subject_id,icustay_id, sapsii', 'mimiciii.sapsii')
    sapsii = sapsii[sapsii['icustay_id'].
                    isin(icu_list)].sort_values(by=['subject_id'])

    sapsii_last = get_sql(con,
                          'subject_id,icustay_id, sapsii',
                          'mimiciii.sapsii_last')
    sapsii_last = sapsii_last[sapsii_last['icustay_id'].
                              isin(icu_list)].sort_values(by=['subject_id'])

    sofa = get_sql(con, 'subject_id,icustay_id, sofa', 'mimiciii.sofa')
    sofa = sofa[sofa['icustay_id'].isin(icu_list)].\
        sort_values(by=['subject_id'])

    # length of stay as numerical feature
    icu_los = get_sql(con, 'subject_id,icustay_id, los',
                      'mimiciii.ICUSTAYS')
    icu_los = icu_los[icu_los['icustay_id'].isin(
        icu_list)].sort_values(by=['subject_id'])

    # turn everything in to matrices
    oasis_m = np.asmatrix(oasis)[:, 2]
    oasis_last_m = np.asmatrix(oasis_last)[:, 2]
    sofa_m = np.asmatrix(sofa)[:, 2]
    saps_m = np.asmatrix(saps)[:, 2]
    sapsii_m = np.asmatrix(sapsii)[:, 2]
    sapsii_last_m = np.asmatrix(sapsii_last)[:, 2]
    icu_los_m = np.asmatrix(icu_los)[:, 2]

    age = load("lists/adults_admitted.csv",
               cols=['subject_id', 'age'])
    age = age[age['subject_id'].isin(
        p_list)].sort_values(by=['subject_id'])
    age_m = np.asmatrix(age)[:, 1]

    # combine all features into one feature vector and return
    v = np.concatenate((oasis_m, sofa_m, sapsii_m, sapsii_last_m,
                        age_m, icu_los_m), axis=1)
    return v
Developer: yjlinotlazy | Project: Insight | Lines: 54 | Source file: get_features.py


Example 18: run

def run(data, args):
	pdata = load(args.patchfile)
	newdata = data
	for i, d in enumerate(data):
		crc = str(crc32(d['orig'].encode('utf-8')) & 0xffffffff)
		if crc in pdata:
			newdata[i][args.lang] = pdata[crc]
		else:
			print(u"Unknown: {0}".format(crc))

	return newdata
Developer: KanColleTool | Project: kancolle-translation | Lines: 11 | Source file: crcpatch.py


Example 19: run

def run(data, args):
	pd = load(args.infile)
	d = {}
	for item in data:
		if not item['ctx'] in d:
			d[item['ctx']] = {}
		d[item['ctx']][item['orig']] = item

	for item in pd:
		if item[args.lang]:
			d[item['ctx']][item['orig']][args.lang] = item[args.lang]
	return data
Developer: KanColleTool | Project: kancolle-translation | Lines: 12 | Source file: merge.py


Example 20: sdaImageTest

def sdaImageTest(modelPath):
    info,model = util.load(modelPath)
    print info
    print model
    shape = info["dataSet"]["shape"]
    color = len(shape) == 3
    import matplotlib.pyplot as plt
    for name,fig in sdaLayerImages2(model,3):
        fig.show()
        raw_input()
        
    plt.show()
Developer: proboscis | Project: GradProject | Lines: 12 | Source file: visualize.py



Note: the util.load function examples in this article were compiled by 纯净天空 from source code and documentation hosted on GitHub, MSDocs, and similar platforms. The code snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and any distribution or use should follow the license of the corresponding project. Do not reproduce without permission.

