• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Python serial.preprocess函数代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中pylearn2.utils.serial.preprocess函数的典型用法代码示例。如果您正苦于以下问题:Python preprocess函数的具体用法?Python preprocess怎么用?Python preprocess使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。



在下文中一共展示了preprocess函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: train_nice

def train_nice(args):
    """Train a NICE model on sMRI data.

    Parameters
    ----------
    args: argparse.Namespace-like
        Must provide dataset_name, transposed, and logistic.

    Raises
    ------
    ValueError
        If the precomputed variance map file is missing.
    """
    vn = True
    center = True
    # BUG FIX: resolve the dataset directory up front. The original only set
    # data_path in the non-transposed branch, so the variance-map lookup
    # below raised NameError whenever args.transposed was set.
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)
    if args.transposed:
        fmri = MRI.MRI_Transposed(dataset_name=args.dataset_name,
                                  even_input=True)
        input_dim = fmri.X.shape[1]
        del fmri
    else:
        mask_file = path.join(data_path, "mask.npy")
        mask = np.load(mask_file)
        input_dim = (mask == 1).sum()
        # NICE needs an even input dimensionality (the coupling layers split
        # the input in half), so drop one voxel if the count is odd.
        if input_dim % 2 == 1:
            input_dim -= 1

    logging.info("Input shape: %d" % input_dim)

    p = path.abspath(path.dirname(__file__))
    yaml_file = path.join(p, "nice_%s.yaml" % args.dataset_name)
    user = path.expandvars("$USER")
    save_file = "nice_%s%s%s" % (args.dataset_name,
                                 "_transposed" if args.transposed else "",
                                 "_logistic" if args.logistic else "")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, save_file))
    variance_map_file = path.join(data_path, "variance_map.npy")
    if not path.isfile(variance_map_file):
        raise ValueError("Variance map file %s not found."
                         % variance_map_file)
    train(yaml_file, save_path, input_dim,
          args.transposed, args.logistic, variance_map_file)
开发者ID:ecastrow,项目名称:pl2mind,代码行数:32,代码来源:train_nice_smri.py


示例2: test_rbm

def test_rbm():
    """Smoke-test: train the sMRI RBM for one epoch, then run the analysis."""
    outs_dir = serial.preprocess("${PYLEARN2_OUTS}")
    if not path.isdir(outs_dir):
        raise IOError("PYLEARN2_OUTS environment variable not set")
    save_path = path.join(outs_dir, "tutorials")
    train_rbm.train_rbm(epochs=1, save_path=save_path)
    mri_analysis.main(path.join(save_path, "rbm_smri.pkl"),
                      save_path, "sz_t")
开发者ID:ecastrow,项目名称:pl2mind,代码行数:7,代码来源:test_rbm_smri.py


示例3: main

def main(args):
    """Run a simple RBM training job for the dataset named in args."""
    dataset_name = args.dataset_name

    logger.info("Getting dataset info for %s" % dataset_name)
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + dataset_name)
    mask = np.load(path.join(data_path, "mask.npy"))
    input_dim = (mask == 1).sum()

    user = path.expandvars("$USER")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, "rbm_simple_test"))

    # File parameters are path specific ones (not model specific).
    file_params = {"save_path": save_path}

    yaml_template = open(yaml_file).read()
    hyperparams = expand(flatten(experiment.default_hyperparams(input_dim=input_dim)),
                         dict_type=ydict)

    # Optional command-line overrides for the default hyperparameters.
    if args.learning_rate is not None:
        hyperparams["learning_rate"] = args.learning_rate
    if args.batch_size is not None:
        hyperparams["batch_size"] = args.batch_size

    # Substitute the path placeholders before the %-interpolation below.
    for key, value in file_params.items():
        yaml_template = yaml_template.replace("%%(%s)s" % key, value)

    yaml = yaml_template % hyperparams

    logger.info("Training")
    train = yaml_parse.load(yaml)
    train.main_loop()
开发者ID:ecastrow,项目名称:pl2mind,代码行数:35,代码来源:simple_train.py


示例4: train_nice

def train_nice(args):
    """Train a NICE model on MRI data, prompting before overwriting outputs.

    Parameters
    ----------
    args: argparse.Namespace-like
        Must provide dataset_name, transposed, logistic, and out_name.
    """
    # NOTE(review): vn and center are set but never read in this snippet.
    vn = True
    center = True
    logger.info("Getting dataset info for %s" % args.dataset_name)
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)
    if args.transposed:
        logger.info("Data in transpose...")
        mri = MRI.MRI_Transposed(dataset_name=args.dataset_name,
                                 unit_normalize=True,
                                 even_input=True,
                                 apply_mask=True)
        input_dim = mri.X.shape[1]
        variance_map_file = path.join(data_path, "transposed_variance_map.npy")
    else:
        mask_file = path.join(data_path, "mask.npy")
        mask = np.load(mask_file)
        input_dim = (mask == 1).sum()
        # Keep the input dimensionality even (NICE splits the input in half).
        if input_dim % 2 == 1:
            input_dim -= 1
        mri = MRI.MRI_Standard(which_set="full",
                               dataset_name=args.dataset_name,
                               unit_normalize=True,
                               even_input=True,
                               apply_mask=True)
        variance_map_file = path.join(data_path, "variance_map.npy")
    save_variance_map(mri, variance_map_file)

    logger.info("Input shape: %d" % input_dim)

    # Locate the YAML template next to this script.
    p = path.abspath(path.dirname(__file__))
    yaml_file = path.join(p, "nice_mri.yaml")
    user = path.expandvars("$USER")

    if args.out_name is not None:
        out_name = args.out_name
    else:
        out_name = args.dataset_name
    save_file = "nice_%s%s%s" % (out_name,
                                 "_transposed" if args.transposed else "",
                                 "_logistic" if args.logistic else "")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, save_file))
    # Interactive overwrite guard: an empty answer defaults to "N".
    if path.isfile(save_path + ".pkl") or path.isfile(save_path + "_best.pkl"):
        answer = None
        while answer not in ["Y", "N", "y", "n"]:
            answer = raw_input("%s already exists, continuing will overwrite."
                               "\nOverwrite? (Y/N)[N]: " % save_path) or "N"
            if answer not in ["Y", "N", "y", "n"]:
                print "Please answer Y or N"
        if answer in ["N", "n"]:
            print "If you want to run without overwrite, consider using the -o option."
            sys.exit()

    logger.info("Saving to prefix %s" % save_path)

    if not path.isfile(variance_map_file):
        raise ValueError("Variance map file %s not found."
                         % variance_map_file)
    train(yaml_file, save_path, input_dim,
          args.transposed, args.logistic, variance_map_file, args.dataset_name)
开发者ID:ecastrow,项目名称:pl2mind,代码行数:60,代码来源:train_nice_mri.py


示例5: test_rbm

def test_rbm():
    """Smoke-test: train the MNIST RBM for one epoch and render its weights."""
    outs_dir = serial.preprocess("${PYLEARN2_OUTS}")
    if not path.isdir(outs_dir):
        raise IOError("PYLEARN2_OUTS environment variable not set")
    save_path = path.join(outs_dir, "tutorials")

    train_rbm.train_rbm(epochs=1, save_path=save_path)
    show_weights.show_weights(path.join(save_path, "rbm_mnist.pkl"),
                              out=path.join(save_path, "rbm_mnist_weights.png"))
开发者ID:ecastrow,项目名称:pl2mind,代码行数:8,代码来源:test_rbm_mnist.py


示例6: load_aod_gts

    def load_aod_gts(self):
        """Load AOD ground-truth arrays from the dataset's aod_extra directory.

        Returns
        -------
        targets, novels: array-like
            Contents of targets.npy and novels.npy respectively.
        """
        extras_dir = path.join(self.dataset_root, "aod_extra/")

        if not path.isdir(serial.preprocess(extras_dir)):
            raise IOError("AOD extras directory %s not found."
                          % serial.preprocess(extras_dir))

        targets = np.load(serial.preprocess(extras_dir + "targets.npy"))
        novels = np.load(serial.preprocess(extras_dir + "novels.npy"))
        return targets, novels
开发者ID:ecastrow,项目名称:pl2mind,代码行数:10,代码来源:MRI.py


示例7: __init__

    def __init__(self,
                 which_set,
                 data_path=None,
                 center=True,
                 rescale=True,
                 gcn=True):
        self.class_name = ['neg', 'pos']
        # load data
        path = "${PYLEARN2_DATA_PATH}/cin/"
        #datapath = path + 'feature850-2-1.pkl'
        if data_path is None:
            data_path = path + 'feature850-2-1.pkl'
        else:
            data_path = path + data_path
        data_path = serial.preprocess(data_path)
        with  open(data_path, 'rb') as f:
            #f = open(datapath, 'rb')
            train_set, valid_set, test_set = cPickle.load(f)
            #f.close()

        self.train_set = train_set
        self.valid_set = valid_set
        self.test_set = test_set
        if which_set == 'train':
            X, Y = self.train_set
        elif which_set == 'valid':
            X, Y = self.valid_set
        else:
            X, Y = self.test_set

        X.astype(float)
        axis = 0
        _max = np.max(X, axis=axis)
        _min = np.min(X, axis=axis)
        _mean = np.mean(X, axis=axis)
        _std = np.std(X, axis=axis)
        _scale = _max - _min


        # print _max
        # print _min
        # print _mean
        # print _std

        if gcn:
            X = global_contrast_normalize(X, scale=gcn)
        else:
            if center:
                X[:, ] -= _mean
            if rescale:
                X[:, ] /= _scale

        # topo_view = X.reshape(X.shape[0], X.shape[1], 1, 1)
        # y = np.reshape(Y, (Y.shape[0], 1))
        # y = np.atleast_2d(Y).T
        y = np.zeros((Y.shape[0], 2))
        y[:, 0] = Y
        y[:, 0] = 1 - Y
        print X.shape, y.shape
        super(CIN_FEATURE2, self).__init__(X=X, y=y)
开发者ID:jackal092927,项目名称:pylearn2_med,代码行数:60,代码来源:cin_feature2.py


示例8: __init__

    def __init__(self, jobs, db, name, updater, analyzer, alerter, reload=False):
        """Set up the analysis page and start its worker threads.

        All constructor arguments (jobs, db, name, updater, analyzer,
        alerter, reload) are attached to the instance via the locals()
        trick below.  Note: `reload` shadows the Python 2 builtin.
        """
        # Bulk-assign every argument as an instance attribute (also binds
        # self.self; intentional shortcut in the original code).
        self.__dict__.update(locals())

        # NOTE(review): `args` here is presumably a module-level namespace
        # holding command-line options — confirm it defines out_dir.
        self.table_dir = serial.preprocess(path.join(args.out_dir,
                                                     self.name))
        self.html = HTMLPage(self.name + " results")

        # Kick off the background workers immediately on construction.
        self.analyzer.start()
        self.updater.start()
开发者ID:ecastrow,项目名称:pl2mind,代码行数:9,代码来源:jobman_analysis.py


示例9: getFilename

 def getFilename(i):
     """Return the preprocessed path of snapshot file *i*.

     Snapshot names are zero-padded to three digits (e.g. snapshot_007.hdf5).
     The %03d format reproduces the original three-branch padding logic
     exactly for any non-negative i (four or more digits are left unpadded).
     `path` is a directory-prefix string from the enclosing scope.
     """
     return serial.preprocess(path + 'snapshot_%03d.hdf5' % i)
开发者ID:Samantha-Thrush,项目名称:pylearn2,代码行数:9,代码来源:nanoParticle.py


示例10: test_data

def test_data():
    """Verify the neuroimaging environment variables and data directories."""
    pylearn2_out_path = path.expandvars("$PYLEARN2_OUTS")
    assert pylearn2_out_path != "", ("PYLEARN2_OUTS environment variable is "
                                     "not set.")

    pylearn2_data_path = path.expandvars("$PYLEARN2_NI_PATH")
    assert pylearn2_data_path != "", ("PYLEARN2_NI_PATH environment"
                                      " variable is not set")

    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/smri/")
    extras_path = serial.preprocess("${PYLEARN2_NI_PATH}/mri_extra/")

    try:
        for directory in (data_path, extras_path):
            assert path.isdir(directory), directory
    except AssertionError as e:
        raise IOError("File or directory not found (%s), did you set your "
                      "PYLEARN2_NI_PATH correctly? (%s)" % (e, data_path))
开发者ID:ecastrow,项目名称:pl2mind,代码行数:18,代码来源:test_rbm_smri.py


示例11: __init__

    def __init__(self, which_set, start=None, stop=None, shuffle=False):
        if which_set not in ['train', 'valid']:
            if which_set == 'test':
                raise ValueError(
                    "Currently test datasets not supported")
            raise ValueError(
                'Unrecognized which_set value "%s".' % (which_set,) +
                '". Valid values are ["train","valid"].')

        p = "${PYLEARN2_NI_PATH}/snp/"
        if which_set == 'train':
            data_path = p + 'gen.chr1.npy'
            label_path = p + 'gen.chr1_labels.npy'
        else:
            assert which_set == 'test'
            data_path = p + 'test.npy'
            label_path = p + 'test_labels.npy'

        data_path = serial.preprocess(data_path)
        label_path = serial.preprocess(label_path)

        print "Loading data"
        topo_view = np.load(data_path)
        y = np.atleast_2d(np.load(label_path)).T
        samples, number_snps = topo_view.shape

        if start is not None:
            stop = stop if (stop <= samples) else samples
            assert 0 <= start < stop
            topo_view = topo_view[start:stop, :]
            y = y[start:stop]

        if shuffle:
            self.shuffle_rng = make_np_rng(None, default_seed=[1, 2, 3], which_method="shuffle")
            for i in xrange(samples):
                j = self.shuffle_rng.randint(samples)
                tmp = topo_view[i].copy()
                topo_view[i] = topo_view[j]
                topo_view[j] = tmp
                tmp = y[i,i+1].copy()
                y[i] = y[j]
                y[j] = tmp

        super(SNP, self).__init__(X=topo_view, y=y, y_labels=np.amax(y)+1)
开发者ID:ecastrow,项目名称:pl2mind,代码行数:44,代码来源:SNP.py


示例12: __init__

    def __init__(self, which_set, one_hot=False, axes=['b', 0, 1, 'c']):
        """Load the OCR letters dataset split given by which_set.

        Parameters
        ----------
        which_set: str
            A key of self.data_split ('train'/'valid'/'test').
        one_hot: bool
            If True, convert integer labels to one-hot float32 rows.
        axes: list
            Axis ordering for the 16x8x1 topological view.
        """
        self.args = locals()

        # Guarantees one of the if/elif branches below runs, so `split`
        # is always bound.
        assert which_set in self.data_split.keys()

        path = serial.preprocess(
            "${PYLEARN2_DATA_PATH}/ocr_letters/letter.data")
        with open(path, 'r') as data_f:
            data = data_f.readlines()
            data = [line.split("\t") for line in data]

        # letter.data columns: item[1] = letter label, item[5] = fold id,
        # item[6:-1] = 16x8 binary pixel values.
        data_x = [map(int, item[6:-1]) for item in data]
        data_letters = [item[1] for item in data]
        data_fold = [int(item[5]) for item in data]

        letters = list(numpy.unique(data_letters))
        data_y = [letters.index(item) for item in data_letters]

        if which_set == 'train':
            split = slice(0, self.data_split['train'])
        elif which_set == 'valid':
            split = slice(self.data_split['train'], self.data_split['train'] +
                          self.data_split['valid'])
        elif which_set == 'test':
            split = slice(self.data_split['train'] + self.data_split['valid'],
                          (self.data_split['train'] +
                           self.data_split['valid'] +
                           self.data_split['test']))

        data_x = numpy.asarray(data_x[split])
        data_y = numpy.asarray(data_y[split])
        # BUG FIX: was numpy.asarray(data_y[split]) — the fold ids were
        # silently replaced by a copy of the labels.
        data_fold = numpy.asarray(data_fold[split])
        assert data_x.shape[0] == data_y.shape[0]
        assert data_x.shape[0] == self.data_split[which_set]

        self.one_hot = one_hot
        if one_hot:
            one_hot = numpy.zeros(
                (data_y.shape[0], len(letters)), dtype='float32')
            for i in xrange(data_y.shape[0]):
                one_hot[i, data_y[i]] = 1.
            data_y = one_hot

        view_converter = dense_design_matrix.DefaultViewConverter(
            (16, 8, 1), axes)
        super(OCR, self).__init__(
            X=data_x, y=data_y, view_converter=view_converter)

        assert not contains_nan(self.X)
        self.fold = data_fold


示例13: main

def main(dataset_name="smri"):
    logger.info("Getting dataset info for %s" % args.dataset_name)
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)
    mask_file = path.join(data_path, "mask.npy")
    mask = np.load(mask_file)
    input_dim = (mask == 1).sum()
    if input_dim % 2 == 1:
        input_dim -= 1
    mri = MRI.MRI_Standard(which_set="full",
                           dataset_name=args.dataset_name,
                           unit_normalize=True,
                           even_input=True,
                           apply_mask=True)
    variance_map_file = path.join(data_path, "variance_map.npy")
    mri_nifti.save_variance_map(mri, variance_map_file)

    user = path.expandvars("$USER")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, "jobman_test"))

    file_params = {"save_path": save_path,
                   "variance_map_file": variance_map_file
                   }

    yaml_template = open(yaml_file).read()
    hyperparams = expand(flatten(mlp_experiment.default_hyperparams(input_dim=input_dim)),
                         dict_type=ydict)

    for param in hyperparams:
        if hasattr(args, param) and getattr(args, param):
            val = getattr(args, param)
            logger.info("Filling %s with %r" % (param, val))
            hyperparams[param] = type(hyperparams[param])(val)

    for param in file_params:
        yaml_template = yaml_template.replace("%%(%s)s" % param, file_params[param])

    yaml = yaml_template % hyperparams
    print yaml
    logger.info("Training")
    train = yaml_parse.load(yaml)
    train.main_loop()
开发者ID:ecastrow,项目名称:pl2mind,代码行数:42,代码来源:simple_train.py


示例14: get_input_params

    def get_input_params(self, args, hyperparams):
        """Resolve (input_dim, variance_map_file) for a dataset/config pair.

        Results are memoized in self.d, keyed by a hash of the relevant
        hyperparameters, so repeated jobs share one variance-map computation.

        Parameters
        ----------
        args: argparse.Namespace-like
            Must provide dataset_name.
        hyperparams: dict
            Must provide "data_class"; may provide variance_normalize,
            unit_normalize, and demean flags.

        Returns
        -------
        tuple of (int, str)
            Input dimensionality and the variance map file path.

        Raises
        ------
        ValueError
            If data_class is not a recognized dataset class name.
        """
        data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)

        data_class = hyperparams["data_class"]
        variance_normalize = hyperparams.get("variance_normalize", False)
        unit_normalize = hyperparams.get("unit_normalize", False)
        demean = hyperparams.get("demean", False)
        # The two normalization modes are mutually exclusive.
        assert not (variance_normalize and unit_normalize)

        logger.info((data_class, variance_normalize, unit_normalize, demean))
        h = hash((data_class, variance_normalize, unit_normalize, demean))

        # Cache hit: a (input_dim, variance_map_file) tuple is always truthy.
        if self.d.get(h, False):
            return self.d[h]

        if data_class == "MRI_Transposed":
            assert not variance_normalize
            mri = MRI.MRI_Transposed(dataset_name=args.dataset_name,
                                     unit_normalize=unit_normalize,
                                     demean=demean,
                                     even_input=True,
                                     apply_mask=True)
            input_dim = mri.X.shape[1]
            variance_file_name = ("variance_map_transposed%s%s.npy"
                                  % ("_un" if unit_normalize else "",
                                     "_dm" if demean else ""))

        elif data_class == "MRI_Standard":
            assert not demean
            mask_file = path.join(data_path, "mask.npy")
            mask = np.load(mask_file)
            input_dim = (mask == 1).sum()
            # Keep the input dimensionality even.
            if input_dim % 2 == 1:
                input_dim -= 1
            mri = MRI.MRI_Standard(which_set="full",
                                   dataset_name=args.dataset_name,
                                   unit_normalize=unit_normalize,
                                   variance_normalize=variance_normalize,
                                   even_input=True,
                                   apply_mask=True)
            variance_file_name = ("variance_map%s%s.npy"
                                  % ("_un" if unit_normalize else "",
                                     "_vn" if variance_normalize else ""))
            logger.info(variance_file_name)
            logger.info((data_class, variance_normalize, unit_normalize, demean))

        else:
            # BUG FIX: an unknown data_class previously fell through to a
            # confusing NameError on input_dim; fail with a clear message.
            raise ValueError("Unknown data_class %r" % data_class)

        variance_map_file = path.join(data_path, variance_file_name)
        if not path.isfile(variance_map_file):
            logger.info("Saving variance file %s" % variance_map_file)
            mri_nifti.save_variance_map(mri, variance_map_file)
        self.d[h] = (input_dim, variance_map_file)
        return self.d[h]
开发者ID:ecastrow,项目名称:pl2mind,代码行数:52,代码来源:load_rbm_experiments.py


示例15: train_nice

def train_nice():
    """Train NICE on transposed sMRI data with the default configuration."""
    vn = True
    center = True
    smri = MRI.MRI_Transposed(dataset_name="smri", even_input=True)
    input_dim = smri.X.shape[1]

    here = path.abspath(path.dirname(__file__))
    yaml_file = path.join(here, "nice_smri_transposed.yaml")
    user = path.expandvars("$USER")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/" % user)
    assert path.isdir(save_path)
    train(yaml_file, save_path, input_dim)
开发者ID:ecastrow,项目名称:pl2mind,代码行数:13,代码来源:train_nice_smri_transposed.py


示例16: loadall

    def loadall(self):
        datasets = []
        for i in range(self.foldn):
            filename = self.filestr.format(str(i + 1))
            filename = dirpath + filename
            filename = serial.preprocess(filename)
            print "load data file: " + filename
            self.loadi(i, filename=filename)

        dataset = datasets[0]
        X, y = datasetXy
        # print X.shape, y.shape

        return datasets
开发者ID:jackal092927,项目名称:pylearn2_med,代码行数:14,代码来源:cin_feature2_composite.py


示例17: run_experiment

def run_experiment(experiment, **kwargs):
    """
    Experiment function.
    Used by jobman to run jobs. Must be loaded externally.
    TODO: add sigint handling.

    Parameters
    ----------
    experiment: module
        Experiment module.
    kwargs: dict
        Typically hyperparameters.
    """

    # Merge defaults, caller overrides, and path-specific file parameters
    # into a single hyperparameter dict.
    hyper_parameters = experiment.default_hyperparams()
    set_hyper_parameters(hyper_parameters, **kwargs)
    file_parameters = experiment.fileparams
    set_hyper_parameters(file_parameters, **kwargs)
    hyper_parameters.update(file_parameters)

    # Resolve dataset-derived inputs (dimensionality and variance map).
    ih = MRIInputHandler()
    input_dim, variance_map_file = ih.get_input_params(hyper_parameters)
    hyper_parameters["nvis"] = input_dim
    hyper_parameters["variance_map_file"] = variance_map_file

    # pid distinguishes this job's log files from concurrent jobs.
    pid = os.getpid()
    out_path = serial.preprocess(
        hyper_parameters.get("out_path", "${PYLEARN2_OUTS}"))
    # Create the output and logs directories on demand (single level only).
    if not path.isdir(out_path):
        os.mkdir(out_path)
    if not path.isdir(path.join(out_path, "logs")):
        os.mkdir(path.join(out_path, "logs"))

    hyper_parameters = expand(flatten(hyper_parameters), dict_type=ydict)

    # Route pylearn2's monitor output through our log handler.
    lh = LogHandler(experiment, hyper_parameters, out_path, pid)
    h = logging.StreamHandler(lh)
    monitor.log.addHandler(h)

    # Instantiate the Train object from the %-interpolated YAML template.
    yaml_template = open(experiment.yaml_file).read()
    yaml = yaml_template % hyper_parameters
    train_object = yaml_parse.load(yaml)
    try:
        train_object.main_loop()
        lh.finish("COMPLETED")
    except KeyboardInterrupt:
        # A manual interrupt still records a final status for the job.
        print("Quitting...")
        lh.finish("KILLED")
开发者ID:ecastrow,项目名称:pl2mind,代码行数:48,代码来源:__init__.py


示例18: __init__

 def __init__(self, which_set='train', center=False, start=None, stop=None,
              axes=['b', 'c', 0, 1], preprocessor=None,
              fit_preprocessor=False, fit_test_preprocessor=False):
     """Load an ECMWF weather dataset split.

     Parameters
     ----------
     which_set: str
         One of 'train', 'valid', 'test'.
     center: bool
         Subtract the per-position mean from the topological view.
     start, stop: int, optional
         Select the [start, stop) slice of examples after construction.
     axes: list
         Axis ordering of the topological view.
     preprocessor: Preprocessor, optional
         Applied to the dataset after construction.
     fit_preprocessor, fit_test_preprocessor: bool
         Whether the preprocessor is fit on this data.
     """
     self.shape = (8, 35, 57)
     self.size = {'train': 2849, 'valid': 2849, 'test': 2849}
     self.range = (-10, 10)
     self.path = "${PYLEARN2_DATA_PATH}/ecmwf/"
     self.set_path = {'train': 'ecmwf.train', 'valid': 'ecmwf.val', 'test': 'ecmwf.test'}
     self.args = locals()
     if which_set not in ['train', 'valid', 'test']:
         raise ValueError(
             'Unrecognized which_set value "%s".' % (which_set,) +
             '". Valid values are ["train","valid","test"].')
     path = self.path + self.set_path[which_set]
     if control.get_load_data():
         path = serial.preprocess(path)
         datasetCache = cache.datasetCache
         path = datasetCache.cache_file(path)
         X, topo_view, y = self._read_ecmwf(path, which_set)
     else:
         # Dummy-data path (pylearn2 "don't load data" mode).
         X = np.random.rand(self.size[which_set], np.prod(self.shape))
         # BUG FIX: the dummy topo_view was drawn flat (1-D), so the
         # (m, v, r, c) unpacking below raised ValueError; draw it with
         # the full 4-D (batch,) + self.shape instead.
         topo_view = np.random.rand(self.size[which_set], *self.shape)
         y = np.random.randint(self.range[0], self.range[1], (self.size[which_set], 1))
     (m, v, r, c) = topo_view.shape
     if center:
         topo_view -= topo_view.mean(axis=0)
     super(ECMWF, self).__init__(X=X, topo_view=topo_view, y=y, axes=axes)
     assert not np.any(np.isnan(self.X))
     if start is not None:
         assert start >= 0
         if stop > self.X.shape[0]:
             raise ValueError('stop=' + str(stop) + '>' +
                              'm=' + str(self.X.shape[0]))
         assert stop > start
         self.X = self.X[start:stop, :]
         if self.X.shape[0] != stop - start:
             raise ValueError("X.shape[0]: %d. start: %d stop: %d"
                              % (self.X.shape[0], start, stop))
         if len(self.y.shape) > 1:
             self.y = self.y[start:stop, :]
         else:
             self.y = self.y[start:stop]
         assert self.y.shape[0] == stop - start
     if which_set == 'test':
         # NOTE(review): fit_test_preprocessor defaults to False, so this
         # assert effectively requires it to match fit_preprocessor.
         assert fit_test_preprocessor is None or \
             (fit_preprocessor == fit_test_preprocessor)
     if self.X is not None and preprocessor:
         preprocessor.apply(self, fit_preprocessor)


示例19: get_mask

    def get_mask(self):
        """
        Get mask for dataset.

        Parameters
        ----------

        Returns
        -------
        mask: array-like
            4D array of 1 and 0 values.
        """
        dataset_dir = path.join(self.dataset_root, self.dataset_name + "/")
        mask = np.load(serial.preprocess(dataset_dir + "mask.npy"))
        is_binary = np.bitwise_or(mask == 0, mask == 1)
        if not np.all(is_binary):
            raise ValueError("Mask has incorrect values.")
        return mask
开发者ID:ecastrow,项目名称:pl2mind,代码行数:18,代码来源:MRI.py


示例20: test_data

def test_data():
    """Verify the MNIST data directory and its four idx files exist."""
    pylearn2_data_path = path.expandvars("$PYLEARN2_DATA_PATH")
    assert pylearn2_data_path != "", ("PYLEARN2_DATA_PATH environment"
                                      " variable is not set")

    data_path = serial.preprocess("${PYLEARN2_DATA_PATH}/mnist/")
    expected_files = ("t10k-images-idx3-ubyte",
                      "t10k-labels-idx1-ubyte",
                      "train-images-idx3-ubyte",
                      "train-labels-idx1-ubyte")
    try:
        assert path.isdir(data_path), data_path
        for filename in expected_files:
            assert path.isfile(path.join(data_path, filename)), filename
    except AssertionError as e:
        raise IOError("File or directory not found (%s), did you set "
                      "PYLEARN2_DATA_PATH correctly? (%s)" % (e, data_path))
开发者ID:ecastrow,项目名称:pl2mind,代码行数:19,代码来源:test_rbm_mnist.py



注:本文中的pylearn2.utils.serial.preprocess函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python serial.save函数代码示例发布时间:2022-05-25
下一篇:
Python serial.mkdir函数代码示例发布时间:2022-05-25
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2213 极客世界.|Sitemap