How to use the geopandas.GeoDataFrame.from_file function in geopandas

To help you get started, we’ve selected a few geopandas examples that show how GeoDataFrame.from_file is used in popular public projects.

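geopandas.GeoDataFrame.from_file reads a vector dataset (Shapefile, GeoJSON, GeoPackage, ...) into a GeoDataFrame; it delegates to geopandas.read_file, so keyword arguments are passed through to that reader. A minimal sketch of typical usage, with a hypothetical local file path:

import geopandas as gpd

# Read a vector file into a GeoDataFrame ('data/example.shp' is a placeholder path).
gdf = gpd.GeoDataFrame.from_file('data/example.shp')

# Inspect the attribute table and the coordinate reference system.
print(gdf.head())
print(gdf.crs)

# Keyword arguments are forwarded to geopandas.read_file,
# e.g. to read only the first ten records:
sample = gpd.GeoDataFrame.from_file('data/example.shp', rows=10)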

github bukun / book_python_gis / part010 / ch09_others / sec4_geopandas / test_5_geometric_manipulations_x_x.py View on Github external
from shapely.geometry import Polygon
from geopandas import GeoSeries, GeoDataFrame
import matplotlib.pyplot as plt

p1 = Polygon([(0, 0), (1, 0), (1, 1)])
p2 = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
p3 = Polygon([(2, 0), (3, 0), (3, 1), (2, 1)])
g = GeoSeries([p1, p2, p3])
###############################################################################
g.plot()
plt.show()
###############################################################################
print(g.area)
###############################################################################
buf = g.buffer(0.5)
###############################################################################
buf.plot()
plt.show()
###############################################################################
# Read a coastline shapefile (GSHHS, crude resolution) into a GeoDataFrame.
boros = GeoDataFrame.from_file('/gdata/GSHHS_c.shp')
###############################################################################
cull = boros['geometry'].convex_hull
cull.plot()
plt.show()
###############################################################################
from shapely.geometry import Point
import numpy as np
xmin, xmax, ymin, ymax = -180, 180, -90, 90
xc = (xmax - xmin) * np.random.random(2000) + xmin
yc = (ymax - ymin) * np.random.random(2000) + ymin
pts = GeoSeries([Point(x, y) for x, y in zip(xc, yc)])
###############################################################################
circles = pts.buffer(2)
###############################################################################
mp = circles.unary_union
###############################################################################
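A note on the last line above: unary_union dissolves every geometry in the GeoSeries into a single (multi)geometry. In recent geopandas releases (1.0 and later) the same operation is spelled GeoSeries.union_all() and unary_union is deprecated; a small sketch, assuming one of those newer versions:

import geopandas as gpd
from shapely.geometry import Point

pts = gpd.GeoSeries([Point(0, 0), Point(1, 0), Point(2, 0)])
circles = pts.buffer(0.75)

# Dissolve the overlapping buffers into one geometry
# (on older geopandas versions use circles.unary_union instead).
merged = circles.union_all()
print(merged.geom_type)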
github PatrikHlobil / Pandas-Bokeh / Tests / test_GeoPandasBokeh.py View on Github external
# os, gpd (geopandas), pandas_bokeh, test_sets_directory and directory
# are defined at module level in this test suite.
def test_hole_geplot():
    """Tests for (multi-)polygons with holes."""

    df = gpd.GeoDataFrame.from_file(
        os.path.join(test_sets_directory, 'hole_shapes', 'hole_shapes.geojson')
    )
    figure = df.plot_bokeh(show_figure=False)

    with open(
        os.path.join(directory, "Plots", "Geolayers_Multipolygons_Holes.html"), "w"
    ) as f:
        f.write(pandas_bokeh.embedded_html(figure))

    assert True
github QratorLabs / measurement_tools / atlas_tools / latency_countrymap.py View on Github external
def _draw_countrymap(cut_countries, fname):
    logger.info('Drawing the countrymap')

    resource_dir = pkg_resources.resource_filename(__package__, SHAPEFILE_DIR)
    resource_fname = os.path.join(resource_dir, SHAPEFILE_NAME)
    df_shapefile_countries = geopandas.GeoDataFrame.from_file(resource_fname)

    dataframe = pandas.DataFrame(
        data=list(cut_countries.items()),
        columns=['iso_a2', 'Latency'],
    )
    dataframe.set_index('iso_a2', inplace=True)

    linear = folium.LinearColormap(
        ['green', 'yellow', 'red'], vmin=0., vmax=120.
    ).to_step(6)

    countrymap = folium.Map(
        location=[20, 20],
        zoom_start=3, min_zoom=2, max_zoom=8,
        tiles='Mapbox Bright'
    )
github USEPA / StreamCat / StreamCat_functions.py View on Github external
def PointInPoly(points, zone, inZoneData, pct_full, mask_dir, appendMetric, summaryfield=None):
    '''
    __author__ = "Marc Weber"
                 "Rick Debbout"
    Returns either the count of point features within every polygon of a
    polygon feature class, or a summary of an attribute field for the points
    in every polygon.

    Arguments
    ---------
    points        : input point features as a GeoPandas GeoDataFrame
    inZoneData    : input polygon shapefile as a string, i.e. 'C:/Temp/outshape.shp'
    pct_full      : table that links COMIDs to pct_full, determined from catchments that are not within the US Census border
    summaryfield  : a list of the field(s) in the points feature to use for summary stats in polygons
    '''
    #startTime = dt.now()
    polys = gpd.GeoDataFrame.from_file(inZoneData)#.set_index('FEATUREID')
    points = points.to_crs(polys.crs)
    if len(mask_dir) > 1:
        polys = polys.drop('AreaSqKM', axis=1)
        tblRP = dbf2DF('%s/%s.tif.vat.dbf' % (mask_dir, zone))
        tblRP['AreaSqKM'] = (tblRP.COUNT * 900) * 1e-6
        tblRP['AreaSqKM'] = tblRP['AreaSqKM'].fillna(0)
        polys = pd.merge(polys, tblRP, left_on='GRIDCODE', right_on='VALUE', how='left')
        polys.crs = {u'datum': u'NAD83', u'no_defs': True, u'proj': u'longlat'}
        #polys = final[['FEATUREID', 'AreaSqKM', fld]]
    # Get list of lat/long fields in the table
    points['latlon_tuple'] = list(zip(points.geometry.map(lambda point: point.x),
                                      points.geometry.map(lambda point: point.y)))
    # Remove duplicate points for 'Count'
    points2 = points.drop_duplicates('latlon_tuple') # points2.head() polys.head() point_poly_join.head()
    try:
        point_poly_join = sjoin(points2, polys, how="left", op="within") # point_poly_join.loc[point_poly_join.FEATUREID > 1]
        fld = 'GRIDCODE'  #next(str(unicode(x)) for x in polys.columns if x != 'geometry')
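The StreamCat excerpt above is cut off inside the try block, but the core pattern it relies on is a spatial join between points and polygons followed by a per-polygon aggregation (newer geopandas versions use predicate= where the excerpt uses op=). A minimal, self-contained sketch of that idea with hypothetical input paths, not StreamCat's actual data:

import geopandas as gpd

# Hypothetical inputs: a point layer and a polygon layer.
points = gpd.GeoDataFrame.from_file('data/points.shp')
polys = gpd.GeoDataFrame.from_file('data/zones.shp')

# Reproject the points into the polygons' CRS before joining.
points = points.to_crs(polys.crs)

# Tag each point with the polygon it falls within, then count points per polygon.
joined = gpd.sjoin(points, polys, how='left', predicate='within')
counts = joined.groupby('index_right').size().rename('point_count')
print(counts.head())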
github ESA-PhiLab / OpenSarToolkit / ost / helpers / vector.py View on Github external
def shp_to_gdf(shapefile):

    gdf = gpd.GeoDataFrame.from_file(shapefile)

    prjfile = shapefile[:-4] + '.prj'
    proj4 = get_proj4(prjfile)

    if proj4 != '+proj=longlat +datum=WGS84 +no_defs':
        print(' INFO: reprojecting AOI layer to WGS84.')
        # reproject
        gdf.crs = proj4
        gdf = gdf.to_crs({'init': 'epsg:4326'})

    return gdf
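One caveat for current environments: the {'init': 'epsg:4326'} CRS syntax used above is deprecated in recent geopandas/pyproj releases. A sketch of the same reprojection with the current API, assuming a hypothetical aoi.shp:

import geopandas as gpd

gdf = gpd.GeoDataFrame.from_file('aoi.shp')  # placeholder path

# Reproject to WGS84 unless the layer is already in EPSG:4326.
if gdf.crs is not None and gdf.crs.to_epsg() != 4326:
    gdf = gdf.to_crs(epsg=4326)

print(gdf.crs)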
github CartoDB / cartoframes / cartoframes / core / cartodataframe.py View on Github external
    @classmethod
    def from_file(cls, filename, **kwargs):
        """
        Alternate constructor to create a CartoDataFrame from a Shapefile or GeoJSON file.
        Extends the ``GeoDataFrame.from_file`` method from geopandas.

        Examples:

            .. code::

                from cartoframes import CartoDataFrame

                cdf = CartoDataFrame.from_file('nybb.shp')
        """
        result = GeoDataFrame.from_file(filename, **kwargs)
        result.__class__ = cls
        return result
github USEPA / StreamCat / StreamCat_PreProcessing.py View on Github external
                       if DataCategory == 'categorical':
                           resamp_type = 'NEAREST'
                       snapping_pnt = "%f %f" % (desc.extent.XMin, desc.extent.YMin)
                       arcpy.ProjectRaster_management(tempras, finalras, out_coor_system, resamp_type, ConvertRes, "", snapping_pnt)
               if UseArcpy == 'No':
                   # Need to add ability to mask as well with gdal / rasterio approach...
                   if not Proj_projcs==dst_crs:
                        resamp_ras = FinalDir + '/' + OutFile + '.tif'
                        resamp_string = "gdalwarp --config GDAL_DATA " + '"C:/Users/mweber/AppData/Local/Continuum/Anaconda/pkgs/libgdal-1.11.2-2/Library/data" ' +' -tr ' + str(ConvertRes) + ' -' + str(ConvertRes) + " -te " + bounds + " -srcnodata " + str(outNDV) +  " -dstnodata "  + str(outNDV) +  " -of GTiff -r near -t_srs " + dst_crs + " -co COMPRESS=DEFLATE -co TFW=YES -co TILED=YES -co TIFF_USE_OVR=TRUE -ot " + outDataType + " " + tempras + " " + resamp_ras
                        startTime = dt.now()
                        call(resamp_string)
                        print "elapsed time " + str(dt.now()-startTime)  
                
        # Processes for vector features
        if FileType == 'ESRI Shapefile':
            Feat = gpd.GeoDataFrame.from_file(InDir + '/' + InFile + '.shp')
            # just for census block groups, subset just to CONUS
            if InFile == 'tl_2010_US_bg10':
                Feat = Feat.loc[~Feat['STATEFP10'].isin(['02','15','72'])]
            # check if we need to create fields and / or run calculations on fields in shapefile
            if ShapeFieldCalc == 'Yes':
                # first project if needed to Albers Equal Area
                if not Feat.crs['proj'] == 'aea':
                    Feat = Feat.to_crs(epsg=5070)
                # gather all the values from the ShapefileFieldCalc control table 
                f = FieldCalcTable.loc[FieldCalcTable['FileName'] == InFile]
                JoinTable = f.loc[f['FileName'] == InFile,'JoinTable']
                InField = f.loc[f['FileName'] == InFile,'InField']
                OutField = f.loc[f['FileName'] == InFile,'OutField']
                Operation = f.loc[f['FileName'] == InFile,'Operation']
                Value = f.loc[f['FileName'] == InFile,'Value']
github OGGM / oggm / oggm / sandbox / itmix / run_itmix.py View on Github external
cfg.PATHS['itmix_divs'] = os.path.join(DATA_DIR, 'itmix', 'my_divides.pkl')

# Calibration data where ITMIX glaciers have been removed
_path = os.path.join(DATA_DIR, 'itmix', 'wgms',
                     'rgi_wgms_links_2015_RGIV5.csv')
cfg.PATHS['wgms_rgi_links'] = _path
_path = os.path.join(DATA_DIR, 'itmix',
                     'glathida',
                     'rgi_glathida_links_2014_RGIV5.csv')
cfg.PATHS['glathida_rgi_links'] = _path

# Read in the RGI file(s)
rgidf = itmix.get_rgi_df(reset=False)

# Set the newly created divides (quick n dirty fix)
df = gpd.GeoDataFrame.from_file(get_demo_file('divides_workflow.shp'))
df = pd.concat([df, pd.read_pickle(cfg.PATHS['itmix_divs'])])
cfg.PARAMS['divides_gdf'] = df.set_index('RGIID')

# Run parameters
cfg.PARAMS['d1'] = 4
cfg.PARAMS['dmax'] = 100

cfg.PARAMS['border'] = 20

cfg.PARAMS['invert_with_sliding'] = False
cfg.PARAMS['min_slope'] = 2
cfg.PARAMS['max_shape_param'] = 0.006
cfg.PARAMS['max_thick_to_width_ratio'] = 0.5
cfg.PARAMS['base_binsize'] = 100.
cfg.PARAMS['temp_use_local_gradient'] = False