Python speedups.enable Function Code Examples


This article collects typical usage examples of the Python function shapely.speedups.enable. If you are wondering what enable does, how to call it, or what it looks like in real code, the hand-picked examples below should help.



The 15 code examples of the enable function shown below are ordered by popularity by default.
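Before the individual examples, here is a minimal sketch of the pattern most of them share: check speedups.available before calling speedups.enable(), so the code still runs when Shapely's C extensions are not compiled. This sketch is illustrative only and is not taken from any of the projects below; the geometry objects are arbitrary.

from shapely import speedups
from shapely.geometry import Point, LineString

if speedups.available:      # True only when the C extensions were compiled
    speedups.enable()       # switch Shapely over to the faster C implementations

# subsequent geometry calls use the speedups transparently
line = LineString([(0, 0), (1, 1), (2, 0)])
d = line.distance(Point(1.0, 0.5))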

Example 1: init

def init():

    # enable Shapely speedups, if possible

    if speedups.available:
        speedups.enable()
    print 'IN;'
Author: scruss | Project: hpgl_shapely | Lines: 7 | Source: hpgl_shapely.py


Example 2: adjust_channel_depth

def adjust_channel_depth(grd,shpfile,lcmax=500.):
    """
    Adjusts the depths of a suntans grid object using a line shapefile.

    The shapefile must have an attribute called "contour"
    """
    from shapely import geometry, speedups
    from maptools import readShpPointLine

    if speedups.available:
        speedups.enable()

    print 'Adjusting depths in channel regions with a shapefile...'
        
    # Load the shapefile
    xyline,contour = readShpPointLine(shpfile,FIELDNAME='contour')
    
    # Load all of the points into shapely type geometry
    
    # Distance method won't work with numpy array
    #P = geometry.asPoint(xy)
    
    P = [geometry.Point(grd.xv[i],grd.yv[i]) for i in range(grd.Nc)]
    
    L=[]
    for ll in xyline:
        L.append(geometry.asLineString(ll))
     
    nlines = len(L)
    weight_all = np.zeros((grd.Nc,nlines))
    for n in range(nlines):
        print 'Calculating distance from line %d...'%n
        
        dist = [L[n].distance(P[i]) for i in range(grd.Nc)]
        dist = np.array(dist)

        # Calculate the weight from the distance
        weight = -dist/lcmax+1.
        weight[dist>=lcmax]=0.
        
        weight_all[:,n] = weight

    # Now go through and re-calculate the depths
    dv =  grd.dv*(1-weight_all.sum(axis=-1))
    for n in range(nlines):
        dv += weight_all[:,n]*contour[n]
        
    grd.dv=dv   
    return grd
Author: mrayson | Project: soda | Lines: 49 | Source: sundepths.py


Example 3: __init__

 def __init__(self, in_dir):
     # path to CSV directory
     self.in_dir = in_dir
     self.file_structure = {
         'AreaCode': {'class_id': 101, 'files': ['AreaCode.csv', 'LocalDataAreaCode.csv']},
         'County': {'class_id': 2, 'files': ['CountySynonyms.csv', 'LocalDataCounty.csv', 'County.csv']},
         'ZipCode': {'class_id': 100, 'files': ['ZipCode.csv', 'LocalDataZipCode.csv']},
         'CMSA': {'class_id': 102, 'files': ['CMSA.csv', 'CMSASynonyms.csv', 'LocalDataCMSA.csv']},
         'State': {'class_id': 1, 'files': ['State.csv', 'StateSynonyms.csv', 'LocalDataState.csv']},
         'City': {'class_id': 10, 'files': ['City.csv', 'CitySynonyms.csv', 'LocalDataCity.csv']},
         'Congress': {'class_id': 103, 'files': ['Congress.csv', 'CongressSynonyms.csv', 'LocalDataCongress.csv']},
         'Country': {'class_id': 0, 'files': ['Country.csv', 'CountrySynonyms.csv', 'LocalDataCountry.csv']},
     }
     self._cursor = self._create_cursor(in_dir)
     speedups.enable()
Author: krdenesh | Project: Maps | Lines: 15 | Source: readers.py


Example 4: __init__

    def __init__(self, info_queue: Queue, output_queue: Queue, counter: Counter, exit_event):
        """

        :param info_queue:
        :param multiprocessing.queue.Queue output_queue:
        :param Counter counter: Ze Counter class for counting counts
        """
        super().__init__()
        self.queue = info_queue
        self.output = output_queue
        self.counter = counter
        self.exit_event = exit_event

        self.logging = logging.getLogger(self.name)
        self.total = info_queue.qsize()
        self.t = 2

        if speedups.available:
            speedups.enable()
Author: boerngen-schmidt | Project: commuter-simulation | Lines: 19 | Source: random_point_generator_shapely.py


Example 5: append_nuts3_region

def append_nuts3_region(dfin, shapefile_path):
    """ Take a pandas dataframe with lat and long columns and a shapefile.
    Convert coordinates to Shapely points to do a point-in-polygon check
    for each point. Append name of nuts3 region corresponding to
    point-in-polygon to the dataframe.
    This is extremely slow. Needs to be optimized with Shapely boundary box.
    """
    df = dfin.copy()
    df['nuts3'] = np.nan
    fc = fiona.open(shapefile_path)
    speedups.enable()
    for feature in fc:
        prepared_shape = prep(asShape(feature['geometry']))
        for index, row in df.iterrows():
            point = Point(row.loc['long'], row.loc['lat'])
            if prepared_shape.contains(point):
                df.loc[index, 'nuts3name'] = feature['properties']['NUTS315NM']
                df.loc[index, 'nuts3id'] = feature['properties']['NUTS315CD']
    return df
Author: jay-tyler | Project: data-petting-zoo | Lines: 19 | Source: engine.py
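The docstring in Example 5 itself notes that the per-point containment loop is slow and should be optimized with a bounding box. A hedged sketch of that optimization, reusing the same imports, column names ('long', 'lat') and NUTS attribute names, and introducing only the hypothetical helper name append_nuts3_region_bbox, could look like this:

def append_nuts3_region_bbox(dfin, shapefile_path):
    """ Same output as append_nuts3_region, but pre-filters candidate points
    with each feature's bounding box before the exact point-in-polygon test.
    """
    df = dfin.copy()
    df['nuts3'] = np.nan
    fc = fiona.open(shapefile_path)
    speedups.enable()
    for feature in fc:
        geom = asShape(feature['geometry'])
        minx, miny, maxx, maxy = geom.bounds
        # cheap rectangular pre-filter on the dataframe columns
        candidates = df[(df['long'] >= minx) & (df['long'] <= maxx) &
                        (df['lat'] >= miny) & (df['lat'] <= maxy)]
        prepared_shape = prep(geom)
        for index, row in candidates.iterrows():
            point = Point(row.loc['long'], row.loc['lat'])
            if prepared_shape.contains(point):
                df.loc[index, 'nuts3name'] = feature['properties']['NUTS315NM']
                df.loc[index, 'nuts3id'] = feature['properties']['NUTS315CD']
    return df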


Example 6: plot_rect

#!/bin/env python
from matplotlib import pyplot

from fabtotum import gerber
from fabtotum.gerber.render import *
from fabtotum.toolpath import *
from fabtotum.gcode import *

from shapely.geometry import Point, Polygon, LineString
from shapely.ops import linemerge

from shapely import speedups
if speedups.available:
	speedups.enable()

BLUE = '#6699cc'
RED =  '#ff0000'
GREEN =  '#00cc00'
GRAY = '#999999'

colors = ['#ff0000','#00cc00', '#0000cc', '#cccc00', '#cc00cc', '#00cccc']

def plot_rect(ax, x,y,w,h, color=GRAY, width=0.5, mirror_x=0):
    if mirror_x:
        x = [x,x-w,x-w,x,x]
        y = [y,y,y+h,y+h,y]
    else:
        x = [x,x+w,x+w,x,x]
        y = [y,y,y+h,y+h,y]
    ax.plot(x, y, color=color, linewidth=width, solid_capstyle='round', zorder=1)
Author: infinity0n3 | Project: python-fabtotum | Lines: 30 | Source: holders.py


Example 7: subset_CS2

def subset_CS2(cs2file,ccfile,cc=0,version='V001'):
    '''
    subset_cs2
        Takes the CryoSat-2 L2 output from IDL, and the corner coordinate file
        produced by pyoval. The CC file is cut to the size of the CryoSat-2 
        file. Furthermore, if you give it both an AWI and an ESA CS2 file, it 
        makes sure that both have the same points and same extent (in case any
        differences do exist, we use the ESA CS2 file as the standard (since
        the CryoVal project is designed to examine the ESA CS2 data.)
        module: subset_cs2
            parameters:
                cs2file:    the .csv file with the CS2 L2 Baseline C data.
                            this can be an ESA file or AWI CS2 file, or both.
                ccfile:     the .dat file of corner coordinates
                cc:         flag should be set to 1 if you wish to output the 
                            CS2ORB corner coordinate file that is now sized to
                            the extent of the CS2 file.
                version:    processing version code string.
    Author: Justin Beckers
    Author Contact: [email protected]
    
    Verion: 1.0
    Version Notes:
        1.0: Initial release.
        
    Release Date: 2016/03/17
    
    Usage Notes: 
        Case 1: Using Main 
            From commandline/terminal, can simply run:
                >>>python subset_CS2.py
            This executes the program with the parameters set on 
            lines 327 - 349. Modify this section in the script file text to 
            meet your requirements
                if __name__=='__main__':
    '''


    #Import python modules.
    import os
    import numpy as np
    from shapely.geometry import Polygon,Point
    from rtree import index
    from shapely import speedups
    speedups.enable()
    
    #Add a quick converter  to convert longitude to 0/360 from -180/180.
    cv = lambda x: float(x)+360.0 if float(x) < 0.0 else float(x)
    
    #Load in the Cryosat-2 Corner Coordinates file. Convert the Longitude to 
    #0-360
    ccdata = np.genfromtxt(ccfile,converters={6:cv,8:cv,10:cv,12:cv,14:cv})
    
    #Load the Cryosat-2 L2 Data from AWI or ESA. Convert the Coordinates to
    #0-360
    if os.path.basename(cs2file).startswith('CS2AWI'): #If it is an AWI file
        cs2data=np.genfromtxt(cs2file,delimiter=',',skip_header=1,
        missing_values='nan',filling_values=np.nan,converters={1:cv})
    
    else: # It is assumed to be an ESA CS L2 file
        cs2data=np.genfromtxt(cs2file,delimiter=',',skip_header=1,
        missing_values='nan',filling_values=np.nan,converters={10:cv})

    #The AWI CS L2 data contains NaNs in the Lon,Lat, and Time fields that 
    #result from the L1B missing waveforms. These NaN values cause problems for
    #the polygon generation later on, and we don't really need them as they are
    #not in the ESA CS2 output from IDL (already cut out) so they are cut.
    foo=[] #A temporary variable
    #If of the longitude,latitude, or time are NaN, keep a list of the row.
    for j in np.arange(len(cs2data)):
        if (np.isnan(cs2data[j,1]) or np.isnan(cs2data[j,2]) or 
            np.isnan(cs2data[j,0])): 
            foo.append(j)
    cs2data=np.delete(cs2data,foo,axis=0) #Cut the bad rows.
   
    #Calculating a polygon over the Prime Meridian or the International Date 
    #Line can be troublesome (polygons sometimes wrap around the earth in the
    #wrong direction, depending on your coordinate system). Here we check
    #if we cross either the Prime Meridian or the I.D.L., and either convert to
    #0/360 coordinate system (already done), or back to -180/180
    converted=1 #Flag to indicate if we needed to convert the longitudes.
    if os.path.basename(cs2file).startswith('CS2AWI'): #If AWI CS2 file:
        if (np.any(cs2data[:,1] >=359.)==1 and np.any(cs2data[:,1] <=1.)==1 and
            np.any(np.logical_and(179.<cs2data[:,10],cs2data[:,10]<181.))==0):
            print "Crosses Prime Meridian but not IDL. Coordinates will be " 
            print "processed in -180/180 system."
            for i in np.arange(len(cs2data)):
                cs2data[i,1]=deconvert_lon(cs2data[i,1])
            for i in np.arange(len(ccdata)): 
                ccdata[i][6]=deconvert_lon(ccdata[i][6])
                ccdata[i][8]=deconvert_lon(ccdata[i][8])
                ccdata[i][10]=deconvert_lon(ccdata[i][10])
                ccdata[i][12]=deconvert_lon(ccdata[i][12])
                ccdata[i][14]=deconvert_lon(ccdata[i][14])
            converted=0
        elif (np.any(cs2data[:,1] >=359.)==0 and np.any(cs2data[:,1] <=1.)==0 
                and np.any(np.logical_and(
                179.<cs2data[:,10],cs2data[:,10]<181.))==1):
            print "Does not cross prime meridian but crosses IDL. Coordinates " 
            print "will be processed in 0/360 system."
#......... part of the code is omitted here .........
Author: SIBeckers | Project: pyoval | Lines: 101 | Source: subset_CS2.py
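According to the docstring in Example 7, subset_CS2 takes a CryoSat-2 L2 .csv file, a corner-coordinate .dat file, an optional cc output flag, and a version string. A hypothetical call, assuming the function is imported from subset_CS2.py and with the file names invented purely for illustration, would look like:

subset_CS2('CS2_ESA_L2_BaselineC_orbit.csv',  # ESA or AWI CryoSat-2 L2 .csv file
           'corner_coordinates.dat',          # pyoval corner-coordinate .dat file
           cc=1,                               # also write the trimmed CC file
           version='V001')                     # processing version string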


Example 8: packCurves

def packCurves():
	if speedups.available:
		speedups.enable()
	t=time.time()
	packsettings=bpy.context.scene.cam_pack
	
	sheetsizex=packsettings.sheet_x
	sheetsizey=packsettings.sheet_y
	direction=packsettings.sheet_fill_direction
	distance=packsettings.distance
	rotate = packsettings.rotate
	
	polyfield=[]#in this, position, rotation, and actual poly will be stored.
	for ob in bpy.context.selected_objects:
		allchunks=[]
		simple.activate(ob)
		bpy.ops.object.make_single_user(type='SELECTED_OBJECTS')
		bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY')
		z=ob.location.z
		bpy.ops.object.location_clear()
		bpy.ops.object.rotation_clear()

		chunks=utils.curveToChunks(ob)
		npolys=utils.chunksToShapely(chunks)
		#add all polys in silh to one poly
		poly=shapely.ops.unary_union(npolys)
		
		poly=poly.buffer(distance/1.5,8)
		poly=poly.simplify(0.0003)
		polyfield.append([[0,0],0.0,poly,ob,z])
	random.shuffle(polyfield)
	#primitive layout here:
	allpoly=prepared.prep(sgeometry.Polygon())#main collision poly.
	#allpoly=sgeometry.Polygon()#main collision poly.
	
	shift=0.0015#one milimeter by now.
	rotchange=.3123456#in radians
	
	xmin,ymin,xmax,ymax=polyfield[0][2].bounds
	if direction=='X':
		mindist=-xmin
	else:
		mindist=-ymin
	i=0
	p=polyfield[0][2]
	placedpolys=[]
	rotcenter=sgeometry.Point(0,0)
	for pf in polyfield:
		print(i)
		rot=0
		porig=pf[2]
		placed=False
		xmin,ymin,xmax,ymax=p.bounds
		#p.shift(-xmin,-ymin)
		if direction=='X':
			x=mindist
			y=-ymin
		if direction=='Y':
			x=-xmin
			y=mindist
		
		iter=0
		best=None
		hits=0
		besthit=None
		while not placed:
			
			#swap x and y, and add to x
			#print(x,y)
			p=porig
			
			if rotate: 
				#ptrans=srotate(p,rot,0,0)
				ptrans=affinity.rotate(p,rot,origin = rotcenter, use_radians=True)
				#ptrans = translate(ptrans,x,y)
				ptrans = affinity.translate(ptrans,x,y)
			else:
				#ptrans = translate(p,x,y)
				ptrans = affinity.translate(p,x,y)
			xmin,ymin,xmax,ymax=ptrans.bounds
			#print(iter,p.bounds)
			
			if xmin>0 and ymin>0 and ((direction=='Y' and xmax<sheetsizex) or (direction=='X' and ymax<sheetsizey)):
				if not allpoly.intersects(ptrans):
				#if allpoly.disjoint(ptrans):
					#print('gothit')
					#we do more good solutions, choose best out of them:
					hits+=1
					if best==None:
						best=[x,y,rot,xmax,ymax]
						besthit=hits
					if direction=='X':
						if xmax<best[3]:
							best=[x,y,rot,xmax,ymax]
							besthit=hits
					elif ymax<best[4]:
						best=[x,y,rot,xmax,ymax]
						besthit=hits
					

#......... part of the code is omitted here .........
Author: AndreKravcenko | Project: blendercam | Lines: 101 | Source: pack.py


Example 9: rank

def rank():
    su.enable()    
    
    index = rtree.index.Index()
    house_properties = []
    with open("geo_value_clp.json", "r") as house_file:
        houses = geojson.loads(house_file.read())
        for ix, feature in enumerate(houses['features']):
            point = sg.shape(feature['geometry'])
            index.insert(id=ix, 
                         coordinates=(point.bounds))
            props = feature['properties']
            props['geom'] = point
            props['trees'] = 0              
            house_properties.append(props)
            if ix % 10000 == 0:
                print ix
    del houses
            
    with fiona.collection("sfutc_m.shp", "r") as tree_file:  
        tree_polys = [sg.shape(feature['geometry']) for feature in tree_file]
    
    for ix, poly in enumerate(tree_polys):
        if not poly.is_valid:            
            # try to fix invalidities
            poly = poly.buffer(0)
            poly = so.unary_union(poly)
            if not poly.is_valid:
                continue
        # let canopy 'radiate' on surrounding houses
        size_factor = math.sqrt(poly.area) / 3.14         
        #print size_factor
        bpoly = poly.buffer(50 + math.sqrt(size_factor), resolution=8)
        # first get rough estimate of houses
        for hit in index.intersection(bpoly.bounds):
            point = house_properties[hit]['geom']
            # chech more thoroughly
            if bpoly.contains(point):
                house_properties[hit]['trees'] += size_factor 
        if ix % 500 == 0:
                print ix
    del index
    del tree_polys
            
    geo_features = []
    print 'houses to convert: ' + str(len(house_properties))
    for idx, house in enumerate(house_properties):
        if idx % 10000 == 0:
            print idx
        point = house['geom']
        geometry = geojson.Point((point.x, point.y))
        properties = {'addr': house['addr'], 
                      're': house['re'],
                      'rei': house['rei'],
                      'trs': house['trees']}
        gj_house = geojson.Feature(geometry=geometry, properties=properties)
        geo_features.append(gj_house)
    print 'writing geojson'
    gj = geojson.FeatureCollection(geo_features) 
    dump = geojson.dumps(gj)
    with open('geo_value_trees.json', 'w') as f:
        f.write(dump)
    print 'done'
    del gj
    del geo_features
    del dump    
    return
Author: stefanborchardt | Project: SFTrees | Lines: 67 | Source: spatial.py


Example 10: write_file

    def write_file(filename, wkt, **attr):

        from xml.etree import ElementTree as et

        # Create an SVG XML element
        # @ToDo: Allow customisation of height/width
        iheight = 74
        height = str(iheight)
        iwidth = 74
        width = str(iwidth)
        doc = et.Element("svg", width=width, height=height, version="1.1", xmlns="http://www.w3.org/2000/svg")

        # Convert WKT
        from shapely.wkt import loads as wkt_loads
        try:
            # Enable C-based speedups available from 1.2.10+
            from shapely import speedups
            speedups.enable()
        except:
            from ..s3utils import s3_debug
            s3_debug("S3GIS", "Upgrade Shapely for Performance enhancements")

        shape = wkt_loads(wkt)

        geom_type = shape.geom_type
        if geom_type not in ("MultiPolygon", "Polygon"):
            error = "Unsupported Geometry: %s" % geom_type
            from ..s3utils import s3_debug
            s3_debug(error)
            return

        # Scale Points & invert Y axis
        from shapely import affinity
        bounds = shape.bounds # (minx, miny, maxx, maxy)
        swidth = abs(bounds[2] - bounds[0])
        sheight = abs(bounds[3] - bounds[1])
        width_multiplier = iwidth / swidth
        height_multiplier = iheight / sheight
        multiplier = min(width_multiplier, height_multiplier) * 0.9 # Padding
        shape = affinity.scale(shape, xfact=multiplier, yfact=-multiplier, origin="centroid")

        # Center Shape
        centroid = shape.centroid
        xoff = (iwidth / 2) - centroid.x
        yoff = (iheight / 2) - centroid.y
        shape = affinity.translate(shape, xoff=xoff, yoff=yoff)

        if geom_type == "MultiPolygon":
            polygons = shape.geoms
        elif geom_type == "Polygon":
            polygons = [shape]
        # @ToDo:
        #elif geom_type == "LineString":
        #    _points = shape
        #elif geom_type == "Point":
        #    _points = [shape]

        points = []
        pappend = points.append
        for polygon in polygons:
            _points = polygon.exterior.coords
            for point in _points:
                pappend("%s,%s" % (point[0], point[1]))

        points = " ".join(points)

        # Wrap in Square for Icon
        # @ToDo: Anti-Aliased Rounded Corners
        # @ToDo: Make optional
        fill = "rgb(167, 192, 210)"
        stroke = "rgb(114, 129, 145)"
        et.SubElement(doc, "rect", width=width, height=height, fill=fill, stroke=stroke)

        # @ToDo: Allow customisation of options
        fill = "rgb(225, 225, 225)"
        stroke = "rgb(165, 165, 165)"
        et.SubElement(doc, "polygon", points=points, fill=fill, stroke=stroke)

        # @ToDo: Add Attributes from list_fields

        # Write out File
        path = os.path.join(current.request.folder, "static", "cache", "svg")
        if not os.path.exists(path):
            os.makedirs(path)
        filepath = os.path.join(path, filename)
        with open(filepath, "w") as f:
            # ElementTree 1.2 doesn't write the SVG file header errata, so do that manually
            f.write("<?xml version=\"1.0\" standalone=\"no\"?>\n")
            f.write("<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n")
            f.write("\"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n")
            f.write(et.tostring(doc))

        return filepath
Author: gunner272 | Project: eden | Lines: 93 | Source: svg.py


Example 11: create_pos_file

def create_pos_file(posfile,scalefile, xlims,ylims,dx,\
    geofile=None,ndmin=5, lcmax=2000.,r=1.05, scalefac=1.0):
    """
    Generates a gmsh background scale file (*.pos)
    
    If a geofile is specified the mesh is embedded
    """
    from shapely import geometry, speedups

    if speedups.available:
        speedups.enable()
        
    X,Y = np.meshgrid(np.arange(xlims[0],xlims[1],dx),np.arange(ylims[0],ylims[1],dx))
    xy = np.vstack((X.ravel(),Y.ravel())).T
    Np = xy.shape[0]
    nj,ni=X.shape
    # Load the scalefile
    xyscale,gridscale = readShpPointLine(scalefile,FIELDNAME='scale')
    
    # Load all of the points into shapely type geometry
    
    # Distance method won't work with numpy array
    #P = geometry.asPoint(xy)
    
    P = [geometry.Point(xy[i,0],xy[i,1]) for i in range(Np)]
    
    L=[]
    for ll in xyscale:
        L.append(geometry.asLineString(ll))
     
    nlines = len(L)
    scale_all = np.zeros((nj,ni,nlines))
    for n in range(nlines):
        print 'Calculating distance from line %d...'%n
        ss = gridscale[n] * scalefac
        lmin = ndmin * ss
        
        # Find the maximum distance
        Nk =  np.log(lcmax/ss)/np.log(r)
        print ss,Nk
        lmax = lmin + Nk * ss
        
        dist = [L[n].distance(P[i]) for i in range(Np)]
        dist = np.array(dist).reshape((nj,ni))
        
        # Calculate the scale
        N = (dist-lmin)/ss
        scale = ss*r**N
        
        ind = dist<=lmin
        if ind.any():
            scale[ind] = ss
            
        ind = scale>lcmax
        if ind.any():
            scale[ind] = lcmax
        
        scale_all[:,:,n] = scale
        
    scale_min = scale_all.min(axis=-1)
    
    write_pos_file(posfile,X,Y,scale_min)  
    
    if not geofile == None:
        fgeo = open(geofile,'a')
        fgeo.write("// Merge a post-processing view containing the target mesh sizes\n")
        fgeo.write('Merge "%s";'%posfile)
        fgeo.write("// Apply the view as the current background mesh\n")
        fgeo.write("Background Mesh View[0];\n")
        fgeo.close()
Author: jadelson | Project: suntanspy | Lines: 70 | Source: gmsh.py


Example 12: resample_OIB

def resample_OIB(oibdata,ccdata,ql=1):
    '''
    Does the resampling of the OIB IDCSI files.
    Uses a spatial index to search points fast. Does not use prepared polygons
    '''
    from shapely.geometry import Point, Polygon
    from shapely import speedups 
    import datetime
    import numpy as np
    import pytz
    #Setup which columns we want to calculate which statistics for.
    meancols = [0,1,2,3,4,5,6,7,8,11,12,13]
    stdevcols = [2,3,4,5,6,7,8,11,12]
    mediancols = [2,3,4,5,6,7,8,11,12]
    maxmincols = [2,3,4,5,6,7,8,11,12,13] 
    speedups.enable()
    #Check for crossings of IDL/prime meridian
    if ((oibdata['lon'] >=359.).any(skipna=True)==1 and 
        (oibdata['lon']<1.).any(skipna=True)==1 and 
        np.logical_and((179.<oibdata['lon']).any(skipna=True),
        (oibdata['lon']<181.).any(skipna=True)==0)):
        print "Crosses Prime Meridian but not IDL. Coordinates will be" 
        print "processed in -180/180 system."
        oibdata['lon']=deconvert_lon(oibdata['lon'])
        for i in np.arange(len(ccdata)): 
            ccdata[i][6]=deconvert_lon(ccdata[i][6])
            ccdata[i][8]=deconvert_lon(ccdata[i][8])
            ccdata[i][10]=deconvert_lon(ccdata[i][10])
            ccdata[i][12]=deconvert_lon(ccdata[i][12])
            ccdata[i][14]=deconvert_lon(ccdata[i][14])
            converted=0
    elif ((oibdata['lon'] >=359.).any(skipna=True)==0 and
        (oibdata['lon']<1.).any(skipna=True)==0 and 
        np.logical_and((179.<oibdata['lon']).any(skipna=True),
        (oibdata['lon']<181.).any(skipna=True)==1)):
        print "Does not cross prime meridian but crosses IDL. Coordinates will" 
        print "be processed in 0/360 system."
        converted=1
    elif ((oibdata['lon'] >=359.).any(skipna=True)==1 and
        (oibdata['lon']<1.).any(skipna=True)==1 and 
        np.logical_and((179.<oibdata['lon']).any(skipna=True),
        (oibdata['lon']<181.).any(skipna=True)==1)):
        print "Crosses both the Prime Meridian and the IDL. Coordinates will"
        print "be processed in -180/180 system."
        oibdata['lon']=deconvert_lon(oibdata['lon'])
        for i in np.arange(len(ccdata)): 
            ccdata[i][6]=deconvert_lon(ccdata[i][6])
            ccdata[i][8]=deconvert_lon(ccdata[i][8])
            ccdata[i][10]=deconvert_lon(ccdata[i][10])
            ccdata[i][12]=deconvert_lon(ccdata[i][12])
            ccdata[i][14]=deconvert_lon(ccdata[i][14])
            converted=0
    elif ((oibdata['lon'] >=359.).any(skipna=True)==0 and 
        (oibdata['lon']<1.).any(skipna=True)==0 and 
        np.logical_and((179.<oibdata['lon']).any(skipna=True),
        (oibdata['lon']<181.).any(skipna=True)==0)):
        print "Does not cross the IDL or the Prime Meridian. Coordinates will" 
        print "be processed in 0/360 system."
        converted=1

    #Try calculating polygons, filling the index.
    try:
        p=[]
        idx=index.Index() #Setup a spatial indec
        n_records=len(ccdata)
        n_polygon_points=5
        polygon_points =np.ndarray(shape=(n_polygon_points,2),dtype=np.float64)
        p=np.ndarray(shape=(n_records),dtype=object)
        #Calculate the polygons for the CryoSat-2 (cc) footprints
        for i in np.arange(n_records):
            fp_x = [ccdata[i,8],ccdata[i,10],ccdata[i,14],ccdata[i,12],
                ccdata[i,8]]
            fp_y = [ccdata[i,9],ccdata[i,11],ccdata[i,15],ccdata[i,13],
                ccdata[i,9]]
            n_polygon_points=len(fp_x)
            polygon_points[:,0]=fp_x[:]
            polygon_points[:,1]=fp_y[:]
            p[i] = Polygon(polygon_points)
        #Fill the spatial index with the icebridge data points
        for j in np.arange(len(oibdata['lon'])):
            idx.insert(j,Point(oibdata.iloc[j]['lon'],
                oibdata.iloc[j]['lat']).bounds)
        #Setup our variables to hold the resampled data. Preallocation is fast.
        newpolys = []
        polys = []
        navg=np.empty((n_records,1))*np.nan
        foobar = np.empty((n_records))*np.nan
        myiflag = np.empty((n_records,1))*np.nan #mode of my ice flag (0 or 1)
        u=np.empty((n_records,len(meancols)))*np.nan #mean
        s=np.empty((n_records,len(stdevcols)))*np.nan #stdev
        m=np.empty((n_records,len(mediancols)))*np.nan #median
        h=np.empty((n_records,len(maxmincols)))*np.nan #max
        l=np.empty((n_records,len(maxmincols)))*np.nan #min
        out = np.empty((n_records,(len(stdevcols)+len(mediancols)+(2*len(maxmincols))+len(meancols)+1)))
        #Let's try the resampling.
        try:
            for i,poly in enumerate(p):   
                newpts=[]
                n_pts = 0
                #Use spatial index to find points that intersect the polygon.
#......... part of the code is omitted here .........
Author: SIBeckers | Project: pyoval | Lines: 101 | Source: resample_OIBIDCSI.py


Example 13: setUp

 def setUp(self):
     self.assertFalse(speedups._orig)
     speedups.enable()
     self.assertTrue(speedups._orig)
Author: 31415us | Project: trajectory | Lines: 4 | Source: test_speedups.py


Example 14: get_grid_layer_pt

def get_grid_layer_pt(input_file, height, field_name,
                      grid_shape="square", mask_layer=None,
                      polygon_layer=None, func='density'):
    if speedups.available and not speedups.enabled:
        speedups.enable()
    proj_robinson = (
            "+proj=robin +lon_0=0 +x_0=0 +y_0=0 "
            "+ellps=WGS84 +datum=WGS84 +units=m +no_defs")

    gdf, replaced_id_field = try_open_geojson(input_file)
    if replaced_id_field and field_name == 'id':
        field_name = '_id'

    if field_name:
        if not gdf[field_name].dtype in (int, float):
            # gdf.loc[:, field_name] = gdf[field_name].replace('', np.NaN)
            # gdf.loc[:, field_name] = gdf[field_name].astype(float)
            gdf.loc[:, field_name] = gdf[field_name].apply(to_float)
        gdf = gdf[gdf[field_name].notnull()]
        gdf = gdf[gdf.geometry.notnull()]
        gdf = gdf[~gdf.geometry.is_empty]
        gdf.index = range(len(gdf))

    if polygon_layer:
        polygon_layer = GeoDataFrame.from_file(
            polygon_layer).to_crs(crs=proj_robinson)
        gdf.to_crs(crs=proj_robinson, inplace=True)
        result_values = get_dens_from_pt(gdf, field_name, polygon_layer, func)
        polygon_layer[func] = [i[0] for i in result_values]
        polygon_layer['count'] = [i[1] for i in result_values]
        return polygon_layer.to_crs({"init": "epsg:4326"}).to_json()

    else:
        if mask_layer:
            _mask = GeoDataFrame.from_file(mask_layer)

            mask = GeoSeries(
                cascaded_union(_mask.geometry.buffer(0)),
                crs=_mask.crs,
                ).to_crs(crs=proj_robinson).values[0]

            try:
                mask = mask.buffer(1).buffer(-1)
            except TopologicalError:
                mask = mask.buffer(0)
        else:
            mask = None

        gdf.to_crs(crs=proj_robinson, inplace=True)

        cell_generator = {
            "square": square_grid_gen,
            "diamond": diams_grid_gen,
            "hexagon": hex_grid_gen,
            }[grid_shape]

        res_geoms = get_dens_grid_pt(
            gdf, height, field_name, mask, func, cell_generator)
        result = GeoDataFrame(
            index=range(len(res_geoms)),
            data={'id': [i for i in range(len(res_geoms))],
                  func: [i[1] for i in res_geoms],
                  'count': [i[2] for i in res_geoms]},
            geometry=[i[0] for i in res_geoms],
            crs=gdf.crs
            ).to_crs({"init": "epsg:4326"})

        total_bounds = result.total_bounds
        if total_bounds[0] < -179.9999 or total_bounds[1] < -89.9999 \
                or total_bounds[2] > 179.9999 or total_bounds[3] > 89.9999:
            result = json.loads(result.to_json())
            repairCoordsPole(result)
            return json.dumps(result)
        else:
            return result.to_json()
Author: mthh | Project: noname-stuff | Lines: 75 | Source: grid_layer_pt.py


Example 15: setUp

 def setUp(self):
     self.assertFalse(speedups._orig)
     if speedups.available:
         speedups.enable()
         self.assertTrue(speedups._orig)
Author: SiggyF | Project: Shapely | Lines: 5 | Source: test_speedups.py



Note: The shapely.speedups.enable function examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright remains with the original authors. When distributing or using the code, please follow the corresponding project's license; do not reproduce without permission.

