Python storm_analysis.getPathOutputTest Function Code Examples


This article collects typical usage examples of the Python storm_analysis.getPathOutputTest function. If you have been wondering exactly how getPathOutputTest is called, or you are looking for concrete examples of its use, the curated code samples below may help.



Twenty code examples of the getPathOutputTest function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Python code examples.
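
In these tests, getPathOutputTest maps a bare file name to a path inside the test output directory, and it is typically paired with storm_analysis.removeFile to clear stale output before a test writes new data. The sketch below illustrates that pattern for orientation only; it is not one of the twenty examples, and the saH5Py import path and file name are assumptions based on the snippets that follow.

import numpy

import storm_analysis
import storm_analysis.sa_library.sa_h5py as saH5Py  # assumed module path


def usage_sketch():
    # Resolve a bare file name to a path in the test output directory;
    # the file itself does not need to exist yet.
    h5_name = storm_analysis.getPathOutputTest("usage_sketch.hdf5")

    # Delete any leftover file from a previous run, as the examples below do.
    storm_analysis.removeFile(h5_name)

    # Write a small localization HDF5 file at the resolved path.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(64, 64, 1, "")
        h5.addLocalizations({"x" : numpy.zeros(5), "y" : numpy.ones(5)}, 0)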

Example 1: configureTest

def configureTest():
    """
    These tests have a lot of setup. This function takes care of this.
    """
    mparams = params.ParametersMultiplane()
        
    # sCMOS calibration files.
    gain = numpy.ones(im_size)
    offset = numpy.zeros(im_size)
    variance = numpy.ones(im_size)
    rqe = numpy.ones(im_size)
    
    cal_file = storm_analysis.getPathOutputTest("c1_cal.npy")
    numpy.save(cal_file, [offset, variance, gain, rqe, 2])
    mparams.changeAttr("channel0_cal", cal_file)

    cal_file = storm_analysis.getPathOutputTest("c2_cal.npy")
    numpy.save(cal_file, [offset, variance, gain, rqe, 2])
    mparams.changeAttr("channel1_cal", cal_file)

    mparams.changeAttr("channel0_ext", "_c1.tif")
    mparams.changeAttr("channel1_ext", "_c2.tif")

    mparams.changeAttr("channel0_offset", 0)
    mparams.changeAttr("channel1_offset", 0)

    return mparams
Developer: ZhuangLab, Project: storm-analysis, Lines: 27, Source: test_mp_analysis_io.py


Example 2: test_hdf5_to_bin_2

def test_hdf5_to_bin_2():
    """
    Test tracks conversion.
    """
    peaks = {"x" : numpy.zeros(10),
             "y" : numpy.ones(10)}

    h5_name = storm_analysis.getPathOutputTest("test_sa_hdf5.hdf5")
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addMetadata("<settings/>")
        h5.setMovieInformation(256, 256, 10, "XYZZY")
        h5.setPixelSize(100.0)
        h5.addTracks(peaks)

    # Convert.
    i3_name = storm_analysis.getPathOutputTest("test_mlist.bin")
    storm_analysis.removeFile(i3_name)
    hdf5ToBin.hdf5ToBin(h5_name, i3_name)

    # Load Insight3 file and check values.
    i3_data = readinsight3.loadI3File(i3_name, verbose = False)

    assert(numpy.allclose(peaks["x"], i3_data['x'] - 1.0))
    assert(numpy.allclose(peaks["y"], i3_data['y'] - 1.0))    
    assert(numpy.allclose(i3_data['fr'], numpy.ones(10)))
Developer: ZhuangLab, Project: storm-analysis, Lines: 28, Source: test_hdf5_to_bin.py


Example 3: test_micrometry_2

def test_micrometry_2():
    """
    Test micrometry on random data.
    """

    locs1_name = storm_analysis.getPathOutputTest("locs1.hdf5")
    locs2_name = storm_analysis.getPathOutputTest("locs2.hdf5")

    # Create test data.
    im_size = 512
    n_points = 50

    numpy.random.seed(0)

    with saH5Py.SAH5Py(locs1_name, is_existing = False, overwrite = True) as h5:
        locs = {"x" : numpy.random.uniform(high = im_size, size = n_points),
                "y" : numpy.random.uniform(high = im_size, size = n_points)}
        h5.setMovieInformation(512, 512, 1, "")
        h5.addLocalizations(locs, 0)

    with saH5Py.SAH5Py(locs2_name, is_existing = False, overwrite = True) as h5:
        locs = {"x" : numpy.random.uniform(high = im_size, size = n_points),
                "y" : numpy.random.uniform(high = im_size, size = n_points)}
        h5.setMovieInformation(512, 512, 1, "")
        h5.addLocalizations(locs, 0)

    # Test
    mm = micrometry.Micrometry(locs1_name,
                               min_size = 5.0,
                               max_size = 100.0,
                               max_neighbors = 20)
    [best_ratio, best_transform] = mm.findTransform(locs2_name, 1.0e-2)

    assert(best_ratio < 10.0)
Developer: ZhuangLab, Project: storm-analysis, Lines: 34, Source: test_micrometry.py


Example 4: test_merge_2

def test_merge_2():
    """
    Test file merging, skipping files with no tracks.
    """
    metadata = "<xml><field1><data1>data</data1></field></xml>"
    ref_tracks = {"x" : numpy.random.randint(0,10,10),
                  "y" : numpy.random.randint(0,10,10)}

    # Create HDF5 files to merge.
    h5_names = []
    for i in range(3):
        h5_name = storm_analysis.getPathOutputTest("test_merge_f" + str(i) + ".hdf5")
        h5_names.append(h5_name)

        with saH5Py.SAH5Py(h5_name, is_existing = False, overwrite = True) as h5:
            h5.addMetadata(metadata)
            h5.setMovieInformation(20,20,1,"")
            h5.setPixelSize(100.0)
            if(i != 1):
                h5.addTracks(ref_tracks)

    # Merge.
    merge_name = storm_analysis.getPathOutputTest("test_merge.hdf5")
    storm_analysis.removeFile(merge_name)
    mergeHDF5.mergeHDF5(h5_names, merge_name)

    # Check merge.
    with saH5Py.SAH5Py(merge_name) as h5:
        assert(metadata == h5.getMetadata())
        for tracks in h5.tracksIterator():
            assert(numpy.allclose(ref_tracks["x"], tracks["x"]))
Developer: ZhuangLab, Project: storm-analysis, Lines: 31, Source: test_merge.py


Example 5: create2DSpline

def create2DSpline():
    movie = storm_analysis.getData("test/data/test.dax")
    mlist = storm_analysis.getData("test/data/test_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf_2d.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf_2d.spline")
    
    storm_analysis.removeFile(psf)
    storm_analysis.removeFile(spline)

    measurePSF.measurePSF(movie, "", mlist, psf, want2d = True, aoi_size = 5)
    psfToSpline.psfToSpline(psf, spline, 4)
Developer: ZhuangLab, Project: storm-analysis, Lines: 11, Source: splines_for_fitting.py


Example 6: create3DSpline

def create3DSpline():

    movie = storm_analysis.getData("test/data/test_spliner.dax")
    mlist = storm_analysis.getData("test/data/test_spliner_ref.hdf5")
    psf = storm_analysis.getPathOutputTest("test_spliner_psf.psf")
    spline = storm_analysis.getPathOutputTest("test_spliner_psf.spline")

    storm_analysis.removeFile(psf)
    storm_analysis.removeFile(spline)
    
    measurePSF.measurePSF(movie, "", mlist, psf, aoi_size = 6)
    psfToSpline.psfToSpline(psf, spline, 5)
Developer: ZhuangLab, Project: storm-analysis, Lines: 12, Source: splines_for_fitting.py


Example 7: test_align_merge_2

def test_align_merge_2():
    """
    Test aligning and merging two HDF5 files with offset.
    """
    n_locs = 500
    tracks = {"x" : numpy.random.normal(loc = 10.0, scale = 0.2, size = n_locs),
              "y" : numpy.random.normal(loc = 10.0, scale = 0.2, size = n_locs),
              "z" : numpy.random.normal(scale = 0.05, size = n_locs)}

    h5_in1 = storm_analysis.getPathOutputTest("test_align_merge_1.hdf5")
    h5_in2 = storm_analysis.getPathOutputTest("test_align_merge_2.hdf5")
    h5_alm = storm_analysis.getPathOutputTest("test_align_merge_3.hdf5")

    # Create input files.
    t_dx = 2.0
    t_dz = 0.3
    with saH5Py.SAH5Py(h5_in1, is_existing = False, overwrite = True) as h5:
        h5.addMetadata("<xml><field1><data1>1</data1></field></xml>")
        h5.setMovieInformation(20, 20, 2, "")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)

    with saH5Py.SAH5Py(h5_in2, is_existing = False, overwrite = True) as h5:
        h5.addMetadata("<xml><field1><data1>2</data1></field></xml>")
        h5.setMovieInformation(20, 20, 2, "")
        h5.setPixelSize(100.0)

        tracks["x"] += t_dx
        tracks["z"] += t_dz
        h5.addTracks(tracks)

    # Align and merge with offset.
    storm_analysis.removeFile(h5_alm)
    [dx, dy, dz] = alignAndMerge.alignAndMerge(h5_in1, h5_in2, h5_alm, dx = -t_dx)

    # Check that we got the right offsets.
    assert(numpy.allclose(numpy.array([dx, dy, dz]),
                          numpy.array([-t_dx, 0.0, -t_dz]),
                          atol = 0.001,
                          rtol = 0.1))


    # Check that the output file is correctly aligned.
    with saH5Py.SAH5Py(h5_alm) as h5:
        tracks = h5.getTracks(fields = ["x", "y", "z"])
        assert(numpy.allclose(numpy.array([numpy.std(tracks["x"]),
                                           numpy.std(tracks["y"]),
                                           numpy.std(tracks["z"])]),
                              numpy.array([0.2, 0.2, 0.05]),
                              atol = 0.001,
                              rtol = 0.1))
Developer: ZhuangLab, Project: storm-analysis, Lines: 51, Source: test_merge.py


Example 8: test_pupilfn_3

def test_pupilfn_3():
    """
    Test PF X derivative (C library).
    """
    dx = 1.0e-6
    geo = pupilMath.Geometry(20, 0.1, 0.6, 1.5, 1.4)
    pf = geo.createFromZernike(1.0, [[1.3, 2, 2]])

    pf_c = pfFnC.PupilFunction(geometry = geo)
    pf_c.setPF(pf)
    
    # Calculate derivative of magnitude as a function of x.
    psf_c = pf_c.getPSF()
    psf_c_dx = pf_c.getPSFdx()
    mag_dx_calc = 2.0 * (numpy.real(psf_c)*numpy.real(psf_c_dx) + numpy.imag(psf_c)*numpy.imag(psf_c_dx))

    # Estimate derivative using (f(x+dx) - f(x))/dx
    mag = pupilMath.intensity(psf_c)
    pf_c.translate(dx,0.0,0.0)
    mag_dx_est = (pupilMath.intensity(pf_c.getPSF()) - mag)/dx
                
    if False:
        with tifffile.TiffWriter(storm_analysis.getPathOutputTest("test_pupilfn_3.tif")) as tf:
            #tf.save(mag.astype(numpy.float32))
            tf.save(mag_dx_calc.astype(numpy.float32))
            tf.save(mag_dx_est.astype(numpy.float32))
            tf.save(numpy.abs(mag_dx_calc - mag_dx_est).astype(numpy.float32))

    assert numpy.allclose(mag_dx_calc, mag_dx_est, atol = 1.0e-6)
    
    pf_c.cleanup()
Developer: ZhuangLab, Project: storm-analysis, Lines: 31, Source: test_pupilfn.py


Example 9: test_pupilfn_8

def test_pupilfn_8():
    """
    Test that pupilfn.make_pupil_fn.makePupilFunction works as expected.
    """
    pf_size = 30
    zmn = [[1.3, 2, 2]]
    z_offset = -0.3
    
    # Create & save pupil function.
    pf_file = storm_analysis.getPathOutputTest("pf_test.pfn")
    makePupilFn.makePupilFunction(pf_file, pf_size, 0.1, zmn, z_offset = z_offset)

    # Load PF.
    with open(pf_file, "rb") as fp:
        pf_data = pickle.load(fp)
        test_pf = pf_data["pf"]

    # Create comparison PF.
    geo = pupilMath.GeometrySim(pf_size,
                                pf_data["pixel_size"],
                                pf_data["wavelength"],
                                pf_data["immersion_index"],
                                pf_data["numerical_aperture"])
    ref_pf = geo.createFromZernike(1.0, zmn)

    # Normalize reference to also have height 1.0 (at z = 0.0).
    psf = pupilMath.intensity(pupilMath.toRealSpace(ref_pf))
    ref_pf = ref_pf * 1.0/math.sqrt(numpy.max(psf))

    # Test that they are the same.
    for z in [-0.2, -0.1, 0.0, 0.1, 0.2]:
        test_psf = pupilMath.intensity(pupilMath.toRealSpace(geo.changeFocus(test_pf, z)))
        ref_psf = pupilMath.intensity(pupilMath.toRealSpace(geo.changeFocus(ref_pf, z - z_offset)))
        #print(numpy.max(numpy.abs(test_psf - ref_psf)))
        assert numpy.allclose(test_psf, ref_psf)
Developer: ZhuangLab, Project: storm-analysis, Lines: 35, Source: test_pupilfn.py


Example 10: test_pupilfn_2

def test_pupilfn_2():
    """
    Test PF translation.
    """
    dx = 0.5
    dy = 0.25
    dz = 0.2
    geo = pupilMath.Geometry(20, 0.1, 0.6, 1.5, 1.4)
    pf = geo.createFromZernike(1.0, [[1.3, 2, 2]])

    pf_c = pfFnC.PupilFunction(geometry = geo)
    pf_c.setPF(pf)

    pf_c.translate(dx, dy, dz)
    psf_c = pupilMath.intensity(pf_c.getPSF())

    defocused = geo.changeFocus(pf, dz)
    translated = geo.translatePf(defocused, dx, dy)
    psf_py = pupilMath.intensity(pupilMath.toRealSpace(translated))

    if False:
        with tifffile.TiffWriter(storm_analysis.getPathOutputTest("test_pupilfn_2.tif")) as tf:
            tf.save(psf_c.astype(numpy.float32))
            tf.save(psf_py.astype(numpy.float32))

    assert numpy.allclose(psf_c, psf_py)
            
    pf_c.cleanup()
Developer: ZhuangLab, Project: storm-analysis, Lines: 28, Source: test_pupilfn.py


Example 11: test_frc

def test_frc():
    mlist_name = storm_analysis.getData("test/data/test_drift_mlist.bin")
    results_name = storm_analysis.getPathOutputTest("test_drift_frc.txt")

    from storm_analysis.frc.frc_calc2d import frcCalc2d

    frcCalc2d(mlist_name, results_name, False)
Developer: ZhuangLab, Project: storm-analysis, Lines: 7, Source: test_frc.py


Example 12: test_pupilfn_7

def test_pupilfn_7():
    """
    Test that PF translation is correct (i.e. independent of size).
    """
    sizes = [10, 20, 40]
    dx = 1.0

    for size in sizes:
        geo = pupilMath.Geometry(size, 0.1, 0.6, 1.5, 1.4)
        pf = geo.createFromZernike(1.0, [[1.3, 2, 2]])

        pf_c = pfFnC.PupilFunction(geometry = geo)
        pf_c.setPF(pf)
        
        psf_untranslated = numpy.roll(pupilMath.intensity(pf_c.getPSF()), 1, axis = 0)
            
        pf_c.translate(dx, 0.0, 0.0)
        psf_translated = pupilMath.intensity(pf_c.getPSF())

        if False:
            with tifffile.TiffWriter(storm_analysis.getPathOutputTest("test_pupilfn_7.tif")) as tf:
                tf.save(psf_untranslated.astype(numpy.float32))
                tf.save(psf_translated.astype(numpy.float32))

        assert numpy.allclose(psf_untranslated, psf_translated)
            
        pf_c.cleanup()
Developer: ZhuangLab, Project: storm-analysis, Lines: 27, Source: test_pupilfn.py


Example 13: test_cl_sa_h5py_6

def test_cl_sa_h5py_6():
    """
    Test getting all of the tracks for clustering.
    """
    tracks = {"category" : numpy.arange(4, dtype = numpy.int32),
              "x" : numpy.arange(4, dtype = numpy.float),
              "y" : numpy.arange(4, dtype = numpy.float),
              "z" : numpy.arange(4, dtype = numpy.float)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write tracks data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,2,"")
        h5.setPixelSize(100.0)
        h5.addTracks(tracks)
        h5.addTracks(tracks)

    # Test getting all the tracking data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert(numpy.allclose(x, cl_dict['loc_id']))
        assert(numpy.allclose(y, cl_dict['loc_id']))
        assert(numpy.allclose(z, cl_dict['loc_id']))
        assert(numpy.allclose(c, cl_dict['loc_id']))
        assert(numpy.allclose(cl_dict['track_id'], numpy.array([0,0,0,0,1,1,1,1])))
Developer: ZhuangLab, Project: storm-analysis, Lines: 28, Source: test_clusters_sa_h5py.py


Example 14: test_cl_sa_h5py_5

def test_cl_sa_h5py_5():
    """
    Test getting all of the localizations for clustering.
    """
    locs = {"category" : numpy.arange(4, dtype = numpy.int32),
            "x" : numpy.arange(4, dtype = numpy.float),
            "y" : numpy.arange(4, dtype = numpy.float)}

    filename = "test_clusters_sa_h5py.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write localization data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.setMovieInformation(1,1,5,"")
        h5.setPixelSize(100.0)
        h5.addLocalizations(locs, 1)
        h5.addLocalizations(locs, 3)

    # Test getting all the localization data.
    with clSAH5Py.SAH5Clusters(h5_name) as cl_h5:
        [x, y, z, c, cl_dict] = cl_h5.getDataForClustering()
        assert(numpy.allclose(x, cl_dict['loc_id']))
        assert(numpy.allclose(y, cl_dict['loc_id']))
        assert(numpy.allclose(z, numpy.zeros(x.size)))
        assert(numpy.allclose(c, cl_dict['loc_id']))
        assert(numpy.allclose(cl_dict['frame'], numpy.array([1,1,1,1,3,3,3,3])))
Developer: ZhuangLab, Project: storm-analysis, Lines: 27, Source: test_clusters_sa_h5py.py


Example 15: test_pupilfn_4

def test_pupilfn_4():
    """
    Test PF X derivative (Python library).
    """
    dx = 1.0e-6
    geo = pupilMath.Geometry(20, 0.1, 0.6, 1.5, 1.4)
    pf = geo.createFromZernike(1.0, [[1.3, 2, 2]])
    
    # Calculate derivative of magnitude as a function of x.
    psf_py = pupilMath.toRealSpace(pf)
    psf_py_dx = pupilMath.toRealSpace(geo.dx(pf))
    mag_dx_calc = 2.0 * (numpy.real(psf_py)*numpy.real(psf_py_dx) + numpy.imag(psf_py)*numpy.imag(psf_py_dx))

    # Estimate derivative using (f(x+dx) - f(x))/dx
    mag = pupilMath.intensity(psf_py)
    translated = geo.translatePf(pf, dx, 0.0)
    mag_dx_est = (pupilMath.intensity(pupilMath.toRealSpace(translated)) - mag)/dx
        
    if False:
        with tifffile.TiffWriter(storm_analysis.getPathOutputTest("test_pupilfn_4.tif")) as tf:
            #tf.save(mag.astype(numpy.float32))
            tf.save(mag_dx_calc.astype(numpy.float32))
            tf.save(mag_dx_est.astype(numpy.float32))
            tf.save(numpy.abs(mag_dx_calc - mag_dx_est).astype(numpy.float32))

    assert numpy.allclose(mag_dx_calc, mag_dx_est, atol = 1.0e-6)
Developer: ZhuangLab, Project: storm-analysis, Lines: 26, Source: test_pupilfn.py


Example 16: test_tracker_6

def test_tracker_6():
    """
    Test max_gap parameter.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0]),
             "sum" : numpy.array([4.0, 4.0, 4.0])}

    filename = "test_sa_hdf5.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        h5.addLocalizations(peaks, 0)
        h5.addLocalizations(peaks, 2)
        h5.addMovieInformation(FakeReader(n_frames = 3))

    # Track.
    tracker.tracker(h5_name, radius = 0.1)

    # Tracking.
    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 6)
        for t in h5.tracksIterator():
            assert(numpy.allclose(t["track_length"], numpy.ones(6)))

    # Redo the tracking allowing single frame gaps.
    tracker.tracker(h5_name, max_gap = 1, radius = 0.1)

    with saH5Py.SAH5Py(h5_name) as h5:
        assert(h5.getNTracks() == 3)
        for t in h5.tracksIterator():
            assert(numpy.allclose(t["track_length"], 2.0*numpy.ones(3)))
Developer: ZhuangLab, Project: storm-analysis, Lines: 34, Source: test_tracker.py


Example 17: test_fiducials_3

def test_fiducials_3():
    """
    Basic fiducials test.
    """
    peaks = {"x" : numpy.array([1.0, 2.0, 3.0]),
             "y" : numpy.array([1.0, 1.0, 1.0])}

    filename = "test_fiducials.hdf5"
    h5_name = storm_analysis.getPathOutputTest(filename)
    storm_analysis.removeFile(h5_name)

    # Write data.
    with saH5Py.SAH5Py(h5_name, is_existing = False) as h5:
        for i in range(3):
            temp = {}
            for elt in peaks:
                temp[elt] = peaks[elt][i:]
            h5.addLocalizations(temp, i)

        h5.addMovieInformation(FakeReader(n_frames = 4))
        
    # Track fiducials..
    fiducials.trackFiducials(h5_name, radius = 0.1, reference_frame = 2)

    # Check.
    with saH5Py.SAH5Py(h5_name) as h5:
        expected = numpy.array([-1,-1,0])
        for fnum, locs in h5.localizationsIterator(fields = ["fiducial_id"]):
            assert numpy.allclose(locs["fiducial_id"], expected[fnum:])
Developer: ZhuangLab, Project: storm-analysis, Lines: 29, Source: test_fiducials.py


Example 18: test_psf_fft2

def test_psf_fft2():
    """
    Test translated PSF calculation.
    """
    dx = 0.5
    dy = 0.25
    dz = 0.2
    [pf_psf, geo, pf] = makePSFAndPF(-0.4, 0.4, 0.05)
    
    pfft = psfFFTC.PSFFFT(pf_psf)
    pfft.translate(dy, dx, dz*(pf_psf.shape[0] - 1)/0.8)
    psf_fft = pfft.getPSF()
    
    defocused = geo.changeFocus(pf, dz)
    translated = geo.translatePf(defocused, dx, dy)
    psf_pf = pupilMath.intensity(pupilMath.toRealSpace(translated))
    
    if False:
        print(numpy.max(numpy.abs(psf_fft - psf_pf)))
        with tifffile.TiffWriter(storm_analysis.getPathOutputTest("test_psf_fft2.tif")) as tf:
            tf.save(psf_fft.astype(numpy.float32))
            tf.save(psf_pf.astype(numpy.float32))

    assert (numpy.max(numpy.abs(psf_fft - psf_pf))) < 1.0e-10

    pfft.cleanup()
Developer: ZhuangLab, Project: storm-analysis, Lines: 26, Source: test_psf_fft.py


Example 19: test_psf_fft7

def test_psf_fft7():
    """
    Test against the Python version, translation.
    """
    dx = 0.5
    dy = 0.25
    dz = 0.2
    [pf_psf, geo, pf] = makePSFAndPF(-0.4, 0.4, 0.05)
    
    pfft_c = psfFFTC.PSFFFT(pf_psf)
    pfft_py = psfFFTPy.PSFFFT(pf_psf)

    pfft_c.translate(dx, dy, dz)
    pfft_py.translate(dx, dy, dz)

    psf_c = pfft_c.getPSF()
    psf_py = pfft_py.getPSF()

    if False:
        print(numpy.max(numpy.abs(psf_c - psf_py)))
        with tifffile.TiffWriter(storm_analysis.getPathOutputTest("test_psf_fft7.tif")) as tf:
            tf.save(psf_c.astype(numpy.float32))
            tf.save(psf_py.astype(numpy.float32))

    assert (numpy.max(numpy.abs(psf_c - psf_py))) < 1.0e-6

    pfft_c.cleanup()
Developer: ZhuangLab, Project: storm-analysis, Lines: 27, Source: test_psf_fft.py


Example 20: test_load_mappings_1

def test_load_mappings_1():
    map_test_file = storm_analysis.getPathOutputTest("map.map")

    max_ch = 4
    mappings = {}
    for i in range(1,max_ch):
        j = i
        mappings[str(i) + "_0_x"] = numpy.arange(j,j+2.5,1.0)
        j += 0.1
        mappings[str(i) + "_0_y"] = numpy.arange(j,j+2.5,1.0)
        j += 0.1
        mappings["0_" + str(i) + "_x"] = numpy.arange(j,j+2.5,1.0)
        j += 0.1
        mappings["0_" + str(i) + "_y"] = numpy.arange(j,j+2.5,1.0)

    max_ch -= 1

    with open(map_test_file, 'wb') as fp:
        pickle.dump(mappings, fp)

    mappings = {}
    [xt_0toN, yt_0toN, xt_Nto0, yt_Nto0] = mpUtil.loadMappings(map_test_file, 0)
    assert(xt_0toN[0,0] == 0.0)
    assert(yt_0toN[0,0] == 0.0)
    assert(xt_Nto0[0,0] == 0.0)
    assert(yt_Nto0[0,0] == 0.0)

    assert(abs(xt_0toN[max_ch,2]-5.2) < 1.0e-6)
    assert(abs(yt_0toN[max_ch,2]-5.3) < 1.0e-6)
    assert(abs(xt_Nto0[max_ch,2]-5.0) < 1.0e-6)
    assert(abs(yt_Nto0[max_ch,2]-5.1) < 1.0e-6)
Developer: ZhuangLab, Project: storm-analysis, Lines: 31, Source: test_mp_utilities.py



Note: The storm_analysis.getPathOutputTest examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors, who retain copyright; distribution and use should follow the license of the corresponding project. Do not repost without permission.

