Python scipy.asarray Function Code Examples


This article collects typical usage examples of Python's scipy.asarray function. If you are wondering what asarray does, how to call it, or what it looks like in real code, the selected examples below should help.



A total of 20 code examples of the asarray function are shown below, ordered by popularity by default.
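Before the examples, here is a minimal standalone sketch of what asarray does. One assumption worth stating up front: in the SciPy versions these projects targeted, scipy.asarray was simply a re-export of numpy.asarray (recent SciPy releases have removed the alias), so the behaviour shown below with numpy.asarray is the same.

import numpy as np   # scipy.asarray was a re-export of numpy.asarray in older SciPy

a = np.asarray([1.0, 2.5, 3.0])   # list -> ndarray
print(a.dtype, a.shape)           # float64 (3,)

b = np.asarray(a)                 # input is already an ndarray of matching dtype,
print(b is a)                     # so no copy is made: prints True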

Example 1: test3_raster_data_from_array

    def test3_raster_data_from_array(self):
        # A test based on this info;
        # http://en.wikipedia.org/wiki/Esri_grid
        # Let's hope no one edits the data....
        raster = [[-9999, -9999, 5, 2], [-9999, 20, 100, 36],
                  [3, 8, 35, 10], [32, 42, 50, 6],
                  [88, 75, 27, 9], [13, 5, 1, -9999]]
        upper_left_x = 0.
        upper_left_y = 300.
        cell_size = 50.0
        no_data_value = -9999

        # Just outside the midpoint of all sides
        lon = asarray([125, 125, 125, 125, 125, 125])
        lat = asarray([275, 225, 175, 125, 75, 25])

        raster = Raster.from_array(raster, upper_left_x, upper_left_y,
                                   cell_size, no_data_value)
        self.assertEqual(raster.ul_x, 0)
        self.assertEqual(raster.ul_y, 300)
        self.assertEqual(raster.x_pixel, 50)
        self.assertEqual(raster.y_pixel, -50)
        self.assertEqual(raster.x_size, 4)
        self.assertEqual(raster.y_size, 6)

        data = raster.raster_data_at_points(lon, lat)
        self.assertTrue(allclose(data, asarray([5.0, 100.0, 35.0,
                                                50.0, 27.0, 1.0])))

        # testing extent
        min_long, min_lat, max_long, max_lat = raster.extent()
        self.assertEqual(min_long, 0)
        self.assertEqual(min_lat, 0)
        self.assertEqual(max_long, 200)
        self.assertEqual(max_lat, 300)
Developer: wcarthur | Project: hazimp | Lines: 35 | Source: test_raster.py


Example 2: unique_rows

def unique_rows(arr):
    """Returns a copy of arr with duplicate rows removed.
    
    From Stackoverflow "Find unique rows in numpy.array."
    
    Parameters
    ----------
    arr : :py:class:`Array`, (`m`, `n`). The array to find the unique rows of.
    
    Returns
    -------
    unique : :py:class:`Array`, (`p`, `n`) where `p` <= `m`
        The array `arr` with duplicate rows removed.
    """
    b = scipy.ascontiguousarray(arr).view(
        scipy.dtype((scipy.void, arr.dtype.itemsize * arr.shape[1]))
    )
    try:
        dum, idx = scipy.unique(b, return_index=True)
    except TypeError:
        # Handle bug in numpy 1.6.2:
        rows = [_Row(row) for row in b]
        srt_idx = sorted(range(len(rows)), key=rows.__getitem__)
        rows = scipy.asarray(rows)[srt_idx]
        row_cmp = [-1]
        for k in xrange(1, len(srt_idx)):
            row_cmp.append(rows[k-1].__cmp__(rows[k]))
        row_cmp = scipy.asarray(row_cmp)
        transition_idxs = scipy.where(row_cmp != 0)[0]
        idx = scipy.asarray(srt_idx)[transition_idxs]
    return arr[idx]
Developer: rmcgibbo | Project: gptools | Lines: 31 | Source: utils.py
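To illustrate the void-view trick that unique_rows relies on, here is a minimal standalone sketch using numpy directly (the snippet above is written for Python 2 and an old SciPy where scipy.unique etc. were numpy re-exports); the small input array is made up for demonstration.

import numpy as np

arr = np.array([[1, 2], [3, 4], [1, 2]])
# View each row as a single opaque (void) element so np.unique can compare whole rows.
b = np.ascontiguousarray(arr).view(
    np.dtype((np.void, arr.dtype.itemsize * arr.shape[1]))
)
_, idx = np.unique(b, return_index=True)
print(arr[np.sort(idx)])   # [[1 2]
                           #  [3 4]]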


Example 3: frombounds

 def frombounds(
     cls, func, lbound, ubound, npop, crossover_rate=0.5, scale=None, strategy=("rand", 2, "bin"), eps=1e-6
 ):
     lbound = sp.asarray(lbound)
     ubound = sp.asarray(ubound)
     pop0 = rand(npop, len(lbound)) * (ubound - lbound) + lbound
     return cls(func, pop0, crossover_rate=crossover_rate, scale=scale, strategy=strategy, eps=eps)
Developer: hksonngan | Project: mytesgnikrow | Lines: 7 | Source: de.py


Example 4: triplot

def triplot(vertices, indices, labels=False):
    """
    Plot a 2D triangle mesh
    """
    
    vertices,indices = asarray(vertices),asarray(indices)

    #3d tensor [triangle index][vertex index][x/y value]
    triangles = vertices[numpy.ravel(indices),:].reshape((indices.shape[0],3,2))
    
    col = matplotlib.collections.PolyCollection(triangles)
    col.set_facecolor('grey')
    col.set_alpha(0.5)
    col.set_linewidth(1)

    #sub =  subplot(111)
    sub = matplotlib.pylab.gca()
    sub.add_collection(col,autolim=True)
    matplotlib.pylab.axis('off')
    sub.autoscale_view()

    if labels:
        barycenters = numpy.average(triangles,axis=1)
        for n,bc in enumerate(barycenters):
            matplotlib.pylab.text(bc[0], bc[1], str(n), {'color' : 'k', 'fontsize' : 8,
                                                         'horizontalalignment' : 'center',
                                                         'verticalalignment' : 'center'
                                                         })
Developer: DongliangGao | Project: pydec | Lines: 28 | Source: draw.py
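A hypothetical usage sketch for triplot, assuming the function above is available together with its module-level imports (the source file imports numpy, matplotlib and asarray at the top); the two-triangle mesh below is made up for illustration.

import matplotlib.pylab as plt

vertices = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]   # four corner points
indices = [[0, 1, 2], [1, 3, 2]]                              # two triangles covering the unit square
triplot(vertices, indices, labels=True)                       # draws grey triangles with index labels
plt.show()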


Example 5: compute_mean_vector

def compute_mean_vector(category_name, labellist, layer = 'fc8'):
    print category_name
    featurefile_list = glob.glob('%s/%s/*.mat' %(featurefilepath, category_name))
    
    # gather all the training samples for which predicted category
    # was the category under consideration
    correct_features = []
    for featurefile in featurefile_list:
        try:
            img_arr = loadmat(featurefile)
            predicted_category = labellist[img_arr['scores'].argmax()]
            if predicted_category == category_name:
                correct_features += [img_arr[layer]]
        except TypeError:
            continue
    
    # Now compute channel wise mean vector
    channel_mean_vec = []
    for channelid in range(correct_features[0].shape[0]):
        channel = []
        for feature in correct_features:
            channel += [feature[channelid, :]]
        channel = sp.asarray(channel)
        assert len(correct_features) == channel.shape[0]
        # Gather mean over each channel, to get mean channel vector
        channel_mean_vec += [sp.mean(channel, axis=0)]

    # this vector contains mean computed over correct classifications
    # for each channel separately
    channel_mean_vec = sp.asarray(channel_mean_vec)
    savemat('%s.mat' %category_name, {'%s'%category_name: channel_mean_vec})
Developer: abhijitbendale | Project: OSDN | Lines: 31 | Source: MAV_Compute.py


Example 6: ZYFF

def ZYFF(Te, EIJ):
    """Computes `ZY` and `FF`, used in other functions.
    
    If `EIJ` is a scalar, the output has the same shape as `Te`. If `EIJ` is an
    array, the output has shape `EIJ.shape` + `Te.shape`. This should keep the
    output broadcastable with `Te`.
    
    Parameters
    ----------
    Te : array of float
        Electron temperature. Shape is arbitrary.
    EIJ : scalar float or array of float
        Energy difference.
    """
    # Expand the dimensions of EIJ to produce the desired output shape:
    Te = scipy.asarray(Te, dtype=float)
    EIJ = scipy.asarray(EIJ, dtype=float)
    for n in xrange(Te.ndim):
        EIJ = scipy.expand_dims(EIJ, axis=-1)
    
    ZY = EIJ / (1e3 * Te)
    
    FF = scipy.zeros_like(ZY)
    mask = (ZY >= 1.5)
    FF[mask] = scipy.log((ZY[mask] + 1) / ZY[mask]) - (0.36 + 0.03 * scipy.sqrt(ZY[mask] + 0.01)) / (ZY[mask] + 1)**2
    mask = ~mask
    FF[mask] = scipy.log((ZY[mask] + 1) / ZY[mask]) - (0.36 + 0.03 / scipy.sqrt(ZY[mask] + 0.01)) / (ZY[mask] + 1)**2
    
    return ZY, FF
Developer: markchil | Project: bayesimp | Lines: 29 | Source: lines.py
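The shape handling described in the ZYFF docstring amounts to appending one length-1 axis to EIJ per dimension of Te. A standalone numpy sketch of that broadcasting pattern (the array values are made up; the function itself targets Python 2, hence xrange):

import numpy as np

Te = np.ones((2, 5))                  # arbitrary electron-temperature shape (2, 5)
EIJ = np.array([10.0, 20.0, 30.0])    # energy differences, shape (3,)

EIJ_expanded = EIJ.reshape(EIJ.shape + (1,) * Te.ndim)   # shape (3, 1, 1)
ZY = EIJ_expanded / (1e3 * Te)
print(ZY.shape)                       # (3, 2, 5) == EIJ.shape + Te.shape, still broadcastable with Te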


Example 7: computeOpenMaxProbability

def computeOpenMaxProbability(openmax_fc8, openmax_score_u):
    """ Convert the scores in probability value using openmax
    
    Input:
    ---------------
    openmax_fc8 : modified FC8 layer from Weibull based computation
    openmax_score_u : degree

    Output:
    ---------------
    modified_scores : probability values modified using OpenMax framework,
    by incorporating degree of uncertainity/openness for a given class
    
    """
    prob_scores, prob_unknowns = [], []
    for channel in range(NCHANNELS):
        channel_scores, channel_unknowns = [], []
        for category in range(NCLASSES):
            channel_scores += [sp.exp(openmax_fc8[channel, category])]
                    
        total_denominator = sp.sum(sp.exp(openmax_fc8[channel, :])) + sp.exp(sp.sum(openmax_score_u[channel, :]))
        prob_scores += [channel_scores/total_denominator ]
        prob_unknowns += [sp.exp(sp.sum(openmax_score_u[channel, :]))/total_denominator]
        
    prob_scores = sp.asarray(prob_scores)
    prob_unknowns = sp.asarray(prob_unknowns)

    scores = sp.mean(prob_scores, axis = 0)
    unknowns = sp.mean(prob_unknowns, axis=0)
    modified_scores =  scores.tolist() + [unknowns]
    assert len(modified_scores) == 1001
    return modified_scores
Developer: abhijitbendale | Project: OSDN | Lines: 32 | Source: compute_openmax.py
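The per-channel normalisation used above is an ordinary softmax with one extra "unknown" term in the denominator. A standalone numpy sketch with made-up scores (NCHANNELS and NCLASSES are globals of the original module and are not needed here):

import numpy as np

fc8 = np.array([2.0, 1.0, 0.5])      # modified FC8 scores for one channel
u = np.array([0.1, 0.2, 0.05])       # per-class "unknown" mass for the same channel

denom = np.exp(fc8).sum() + np.exp(u.sum())
probs = np.exp(fc8) / denom          # per-class probabilities
p_unknown = np.exp(u.sum()) / denom  # probability assigned to the extra "unknown" class
print(probs.sum() + p_unknown)       # 1.0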


Example 8: test_Epicentral

    def test_Epicentral(self):
        dist = Distances(None,None,None,None,None,None,None,None,None,None,None)
        
        distance_type='Epicentral'

        rupture_centroid_lat=asarray((-31.0))
        rupture_centroid_lon=asarray((116.0))
        
        site_lat=asarray((-31,-32,-33,-34))        
        site_lon=asarray((116.0,116.0,116.0,116.0))

        distance=dist.raw_distances(site_lat,
                                    site_lon,
                                    rupture_centroid_lat,
                                    rupture_centroid_lon,
                                    lengths,
                                    azimuths,
                                    widths,
                                    dips,
                                    depths,
                                    depths_to_top,
                                    distance_type,
                                    projection)


        d=asarray((0,1,2,3))*(1.852*60)
        d=d[:,newaxis]
        d2=As_The_Cockey_Flies(rupture_centroid_lat,
                               rupture_centroid_lon,
                               site_lat,
                               site_lon)

        assert allclose(d,distance,rtol=0.001)
        assert allclose(d,d2)
Developer: dynaryu | Project: eqrm | Lines: 34 | Source: test_distance_functions.py


Example 9: test_Epicentral2

    def test_Epicentral2(self):
        dist = Distances(None,None,None,None,None,None,None,None,None,None,None)
        
        distance_type='Epicentral'
        rupture_centroid_lat=asarray((-31.0,-33,-36))
        rupture_centroid_lon=asarray((116.0,118,222))
        

        site_lat=asarray((-31,-32,-33,-34))        
        site_lon=asarray((116.0,116.0,116.0,116.0))

        distance=dist.raw_distances(site_lat,
                                    site_lon,
                                    rupture_centroid_lat,
                                    rupture_centroid_lon,
                                    lengths,
                                    azimuths,
                                    widths,
                                    dips,
                                    depths,
                                    depths_to_top,
                                    distance_type,
                                    projection)
        

        
        d = As_The_Cockey_Flies(rupture_centroid_lat,
                                rupture_centroid_lon,
                                site_lat,
                                site_lon)

        assert allclose(d,distance,rtol=0.1)
Developer: dynaryu | Project: eqrm | Lines: 32 | Source: test_distance_functions.py


Example 10: draw_regions

def draw_regions(original_image, mask_image, outline=["green", "blue", "red"],
                 fill=[None, None, None], mode_pass=[False, False, False]):
    heigth = original_image.size[1]
    regions = find_regions(mask_image)
    filtered_regions = filter_regions(regions,
                                      noise_heigth=NOISE_FACTOR * heigth)
    out = original_image.convert("RGB")
    for i, region in enumerate(filtered_regions):
        if mode_pass[i]:
            widths = scipy.asarray([f.width() for f in filtered_regions[i]])
            heigths = scipy.asarray([f.heigth() for f in filtered_regions[i]])
            mode_width = float(scipy.stats.mstats.mode(widths)[0])
            mode_heigth = float(scipy.stats.mstats.mode(heigths)[0])
        if outline[i] or fill[i]:
            draw = ImageDraw.Draw(out)
            for r in filtered_regions[i]:
                if (not mode_pass[i]
                    or (mode_pass[i]
                        and (mode_width - mode_pass[i] <= r.width() \
                             <= mode_width + mode_pass[i]
                             or mode_heigth - mode_pass[i] <= r.heigth() \
                             <= mode_heigth + mode_pass[i]))):
                    draw.rectangle(r.box(), outline=outline[i], fill=fill[i])
            del draw
    return out
Developer: versae | Project: transfer | Lines: 25 | Source: utils.py


Example 11: test_As_The_Cockey_Flies

 def test_As_The_Cockey_Flies(self):
     # Test data from GA website 
     # As_The_Cockey_Flies implements the Great Circle method
     # 
     # http://www.ga.gov.au/earth-monitoring/geodesy/geodetic-techniques/distance-calculation-algorithms.html
     rupture_centroid_lat = asarray((-30))
     rupture_centroid_lon = asarray((150))
     
     site_lat = asarray((-31,-31,-32,-33,-34,-35,-40,-50,-60,-70,-80))
     site_lon = asarray((150,151,151,151,151,151,151,151,151,151,151))
     
     expected = asarray([[111.120],
                         [146.677],
                         [241.787],
                         [346.556],
                         [454.351],
                         [563.438],
                         [1114.899],
                         [2223.978],
                         [3334.440],
                         [4445.247],
                         [5556.190]])
     
     d = As_The_Cockey_Flies(rupture_centroid_lat,
                             rupture_centroid_lon,
                             site_lat,
                             site_lon)
     
     assert allclose(d,expected)
Developer: dynaryu | Project: eqrm | Lines: 29 | Source: test_distance_functions.py


Example 12: ref_indicator

    def ref_indicator(self, coord):
        """
        Return the value of the indicator for the reference coordinates if appropriate.

        NOTE: Currently we will simply implement the reference discretization as non-overlapping boxes.
        """
        # create a distance vector
        distancevec = sp.asarray(coord) - sp.asarray(self.ref_center)

        # if any collective variable is periodic, construct dr, the adjuct for minimum image convetion for the periodic cv's
        if self.wrapping is not None:

            # build dr
            dr = np.zeros(distancevec.shape)

            # add values to dr if the CV wraps
            for i in xrange(len(self.wrapping)):
                if self.wrapping[i] != 0.0:
                    # This is an old trick from MD codes to find the minimum distance between two points.
                    dr[i] = self.wrapping[i] * np.rint(distancevec[i]/self.wrapping[i])

            # add min image vector
            distancevec -= dr

        # We return 1.0 if all the distances are smaller than the width of the box from the center, 0.0 otherwise.
        return float(np.prod(self.ref_width > np.abs(distancevec)))
Developer: jtempkin | Project: enhanced_sampling_toolkit | Lines: 26 | Source: basisFunctions.py


Example 13: xcorrv

def xcorrv(a, b=None, lag=None, dtype=None):
    """vectorial cross correlation by taking the expectation over an outer product"""

    # checks
    a = sp.asarray(a)
    b = sp.asarray(b or a)
    if not (a.ndim == b.ndim):
        raise ValueError('a.ndim !== b.ndim')

    #if a.size != b.size:
    #    raise ValueError('a.size != b.size')
    #if a.size < 2:
    #    raise ValueError('a.size < 2')

    if lag is None:
        lag = int(a.shape[0] - 1)
    if lag > a.shape[0] - 1:
        raise ValueError('lag > vector len - 1')

    # init
    lag_range = xrange(int(-lag), int(lag) + 1)
    rval = sp.empty((a.shape[1], b.shape[1], len(lag_range)), dtype=dtype or a.dtype)

    # calc
    for tau in lag_range:
        prod = a.T[:, None, max(0, +tau):min(len(a), len(a) + tau)] * \
               b.T[None, :, max(0, -tau):min(len(b), len(b) - tau)].conj()
        rval[..., lag + tau] = prod.mean(axis=-1)

    # return
    return rval
Developer: pmeier82 | Project: BOTMpy | Lines: 31 | Source: funcs_general.py
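A hypothetical usage sketch for xcorrv, assuming the function above is defined (it targets Python 2, so xrange would need to become range under Python 3, and sp must be an old SciPy that still re-exports the numpy functions); the random input is made up:

import numpy as np

a = np.random.randn(100, 4)    # 100 samples of a 4-channel signal
r = xcorrv(a, lag=5)           # lags -5 .. +5
print(r.shape)                 # (4, 4, 11): channel-by-channel correlation for each lag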


Example 14: test2_raster_data_at_points

    def test2_raster_data_at_points(self):
        # Write a file to test
        f = tempfile.NamedTemporaryFile(suffix='.aai',
                                        prefix='test_misc',
                                        delete=False)
        f.write('ncols 3   \r\n')
        f.write('nrows 2 \r\n')
        f.write('xllcorner +0.   \r\n')
        f.write('yllcorner +8. \r\n')
        f.write('cellsize 1   \r\n')
        f.write('NODATA_value -9999 \r\n')
        f.write('1 2 -9999   \r\n')
        f.write('4 5 6')
        f.close()
        # lon 0 - 3
        # lat 8 - 10

        # Just outside the midpoint of all sides
        lon = asarray([-0.0001, 1.5, 3.0001, 1.5])
        lat = asarray([9., 10.00001, 9.0, 7.99999])
        raster = Raster.from_file(f.name)
        data = raster.raster_data_at_points(lon, lat)
        self.assertTrue(numpy.all(numpy.isnan(data)))

        # Inside lower left corner of No data cell

        lon = asarray([2.0001])
        lat = asarray([9.000019])
        raster = Raster.from_file(f.name)
        data = raster.raster_data_at_points(lon, lat)
        self.assertTrue(numpy.all(numpy.isnan(data)))

        os.remove(f.name)
Developer: wcarthur | Project: hazimp | Lines: 33 | Source: test_raster.py


Example 15: __call__

 def __call__(self, Xi, Xj, ni, nj, hyper_deriv=None, symmetric=False):
     """Evaluate the covariance between points `Xi` and `Xj` with derivative order `ni`, `nj`.
     
     Parameters
     ----------
     Xi : :py:class:`Matrix` or other Array-like, (`M`, `N`)
         `M` inputs with dimension `N`.
     Xj : :py:class:`Matrix` or other Array-like, (`M`, `N`)
         `M` inputs with dimension `N`.
     ni : :py:class:`Matrix` or other Array-like, (`M`, `N`)
         `M` derivative orders for set `i`.
     nj : :py:class:`Matrix` or other Array-like, (`M`, `N`)
         `M` derivative orders for set `j`.
     hyper_deriv : Non-negative int or None, optional
         The index of the hyperparameter to compute the first derivative
         with respect to. If None, no derivatives are taken. Hyperparameter
         derivatives are not supported at this point. Default is None.
     symmetric : bool, optional
         Whether or not the input `Xi`, `Xj` are from a symmetric matrix.
         Default is False.
     
     Returns
     -------
     Kij : :py:class:`Array`, (`M`,)
         Covariances for each of the `M` `Xi`, `Xj` pairs.
     
     Raises
     ------
     NotImplementedError
         If the `hyper_deriv` keyword is not None.
     """
     if hyper_deriv is not None:
         raise NotImplementedError("Hyperparameter derivatives have not been implemented!")
     n_cat = scipy.asarray(scipy.concatenate((ni, nj), axis=1), dtype=int)
     X_cat = scipy.asarray(scipy.concatenate((Xi, Xj), axis=1), dtype=float)
     n_cat_unique = unique_rows(n_cat)
     k = scipy.zeros(Xi.shape[0], dtype=float)
     # Loop over unique derivative patterns:
     if self.num_proc > 1:
         pool = multiprocessing.Pool(processes=self.num_proc)
     for n_cat_state in n_cat_unique:
         idxs = scipy.where(scipy.asarray((n_cat == n_cat_state).all(axis=1)).squeeze())[0]
         if (n_cat_state == 0).all():
             k[idxs] = self.cov_func(Xi[idxs, :], Xj[idxs, :], *self.params)
         else:
             if self.num_proc > 1 and len(idxs) > 1:
                 k[idxs] = scipy.asarray(
                     pool.map(_ArbitraryKernelEval(self, n_cat_state), X_cat[idxs, :]),
                     dtype=float
                 )
             else:
                 for idx in idxs:
                     k[idx] = mpmath.chop(mpmath.diff(self._mask_cov_func,
                                                      X_cat[idx, :],
                                                      n=n_cat_state,
                                                      singular=True))
     
     if self.num_proc > 0:
         pool.close()
     return k
Developer: pennajm | Project: gptools | Lines: 60 | Source: core.py


Example 16: __init__

  def __init__(self, mixcoeffs, means, covs):
    self.mixcoeffs = scipy.asarray(mixcoeffs)
    self.means = scipy.asarray(means)
    self.covs = scipy.asarray(covs)

    self.degree = self.mixcoeffs.shape[0]
    self.dim = self.covs.shape[1]
Developer: bayerj | Project: theano-mog | Lines: 7 | Source: gaussianmixture.py


Example 17: trimesh

def trimesh(vertices, indices, labels=False):
    """
    Plot a 2D triangle mesh
    """
    from scipy import asarray
    from matplotlib import collections
    from pylab import gca, axis, text
    from numpy import average
    
    vertices,indices = asarray(vertices),asarray(indices)

    #3d tensor [triangle index][vertex index][x/y value]
    triangles = vertices[indices.ravel(),:].reshape((indices.shape[0],3,2))
    
    col = collections.PolyCollection(triangles)
    col.set_facecolor('grey')
    col.set_alpha(0.5)
    col.set_linewidth(1)

    sub = gca()
    sub.add_collection(col,autolim=True)
    axis('off')
    sub.autoscale_view()

    if labels:
        barycenters = average(triangles,axis=1)
        for n,bc in enumerate(barycenters):
            text(bc[0], bc[1], str(n), {'color' : 'k', 'fontsize' : 8,
                                        'horizontalalignment' : 'center',
                                        'verticalalignment' : 'center'})
Developer: VfifthV | Project: pyamg-examples | Lines: 30 | Source: helper.py


Example 18: set_data

    def set_data(self, data, events=None, gtruth=None):
        """update the plot with new chunk of data

        :Parameters:
            data : ndarray
                A 1d array with data to show.
            events : ndarray
                A 1d array of event times to show.
        """

        # data
        self._data.setData(N.arange(data.size), data)

        # events
        if events is None:
            ev = N.zeros(0)
        else:
            ev = N.asarray(events)
        self._events.setData(ev, N.ones(ev.size) * self.range)

        # gtruth
        if gtruth is None:
            gt = N.zeros(0)
        else:
            gt = N.asarray(list(gtruth))
        self._gtruth.setData(gt, N.ones(gt.size) * -self.range)

        # replot
        if self.replot_on_update is True:
            self.replot()
Developer: mtambos | Project: Neural-Simulation | Lines: 30 | Source: plotting.py


Example 19: recalibrate_scores

def recalibrate_scores(weibull_model, labellist, imgarr,
                       layer = 'fc8', alpharank = 10, distance_type = 'eucos'):
    """ 
    Given FC8 features for an image, list of weibull models for each class,
    re-calibrate scores

    Input:
    ---------------
    weibull_model : pre-computed weibull_model obtained from weibull_tailfitting() function
    labellist : ImageNet 2012 labellist
    imgarr : features for a particular image extracted using caffe architecture
    
    Output:
    ---------------
    openmax_probab: Probability values for a given class computed using OpenMax
    softmax_probab: Probability values for a given class computed using SoftMax (these
    were precomputed from caffe architecture. Function returns them for the sake 
    of convienence)

    """
    
    imglayer = imgarr[layer]
    ranked_list = imgarr['scores'].argsort().ravel()[::-1]
    alpha_weights = [((alpharank+1) - i)/float(alpharank) for i in range(1, alpharank+1)]
    ranked_alpha = sp.zeros(1000)
    for i in range(len(alpha_weights)):
        ranked_alpha[ranked_list[i]] = alpha_weights[i]

    # Now recalibrate each fc8 score for each channel and for each class
    # to include probability of unknown
    openmax_fc8, openmax_score_u = [], []
    for channel in range(NCHANNELS):
        channel_scores = imglayer[channel, :]
        openmax_fc8_channel = []
        openmax_fc8_unknown = []
        count = 0
        for categoryid in range(NCLASSES):
            # get distance between current channel and mean vector
            category_weibull = query_weibull(labellist[categoryid], weibull_model, distance_type = distance_type)
            channel_distance = compute_distance(channel_scores, channel, category_weibull[0],
                                                distance_type = distance_type)

            # obtain w_score for the distance and compute probability of the distance
            # being unknown wrt to mean training vector and channel distances for
            # category and channel under consideration
            wscore = category_weibull[2][channel].w_score(channel_distance)
            modified_fc8_score = channel_scores[categoryid] * ( 1 - wscore*ranked_alpha[categoryid] )
            openmax_fc8_channel += [modified_fc8_score]
            openmax_fc8_unknown += [channel_scores[categoryid] - modified_fc8_score ]

        # gather modified scores fc8 scores for each channel for the given image
        openmax_fc8 += [openmax_fc8_channel]
        openmax_score_u += [openmax_fc8_unknown]
    openmax_fc8 = sp.asarray(openmax_fc8)
    openmax_score_u = sp.asarray(openmax_score_u)
    
    # Pass the recalibrated fc8 scores for the image into openmax    
    openmax_probab = computeOpenMaxProbability(openmax_fc8, openmax_score_u)
    softmax_probab = imgarr['scores'].ravel() 
    return sp.asarray(openmax_probab), sp.asarray(softmax_probab)
Developer: abhijitbendale | Project: OSDN | Lines: 60 | Source: compute_openmax.py


Example 20: sinc_interp1d

def sinc_interp1d(x, s, r):
    """Interpolates `x`, sampled at times `s`
    Output `y` is sampled at times `r`

    inspired from from Matlab:
    http://phaseportrait.blogspot.com/2008/06/sinc-interpolation-in-matlab.html

    :param ndarray x: input data time series
    :param ndarray s: input sampling time series (regular sample interval)
    :param ndarray r: output sampling time series
    :return ndarray: output data time series (regular sample interval)
    """

    # init
    s = sp.asarray(s)
    r = sp.asarray(r)
    x = sp.asarray(x)
    if x.ndim == 1:
        x = sp.atleast_2d(x)
    else:
        if x.shape[0] == len(s):
            x = x.T
        else:
            if x.shape[1] != s.shape[0]:
                raise ValueError('x and s must be same temporal extend')
    if sp.allclose(s, r):
        return x.T
    T = s[1] - s[0]

    # resample
    sincM = sp.tile(r, (len(s), 1)) - sp.tile(s[:, sp.newaxis], (1, len(r)))
    return sp.vstack([sp.dot(xx, sp.sinc(sincM / T)) for xx in x]).T
Developer: pmeier82 | Project: BOTMpy | Lines: 32 | Source: spike_alignment.py
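A hypothetical usage sketch for sinc_interp1d, assuming the function above is defined and sp points at an old SciPy that still exposes the numpy aliases (otherwise substitute numpy for sp inside the function); the sine signal is made up:

import numpy as np

s = np.arange(0.0, 1.0, 0.1)       # regular input sample times, spacing T = 0.1
x = np.sin(2 * np.pi * 2.0 * s)    # a 2 Hz sine sampled at those times
r = np.arange(0.0, 0.9, 0.01)      # denser output sample times

y = sinc_interp1d(x, s, r)         # shape (len(r), 1): one column per input series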



Note: The scipy.asarray examples in this article were compiled by 纯净天空 from source-code and documentation hosting platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright in the code remains with the original authors. Please follow the corresponding project's license when redistributing or using the code, and do not reproduce this article without permission.

