How to use the geopandas.GeoDataFrame.from_postgis function in geopandas

To help you get started, we’ve selected a few examples of geopandas.GeoDataFrame.from_postgis, drawn from popular ways the function is used in public projects.

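Before looking at the project examples below, here is a minimal sketch of calling GeoDataFrame.from_postgis directly. The database URL, the nybb table, and the geom column are assumptions for illustration only; substitute the details of your own PostGIS database.

import geopandas
from sqlalchemy import create_engine

# Hypothetical connection details -- replace with your own PostGIS database URL.
engine = create_engine("postgresql://user:password@localhost:5432/test_geopandas")

# Read the query result into a GeoDataFrame, parsing the "geom" column as geometry.
sql = "SELECT borocode, boroname, geom FROM nybb;"
df = geopandas.GeoDataFrame.from_postgis(sql, engine, geom_col="geom")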

github geopandas / geopandas / tests / test_geodataframe.py
def test_from_postgis_custom_geom_col(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = """SELECT
                     borocode, boroname, shape_leng, shape_area,
                     geom AS __geometry__
                     FROM nybb;"""
            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')
        finally:
            if PANDAS_NEW_SQL_API:
                # It's not really a connection, it's an engine
                con = con.connect()
            con.close()

        validate_boro_df(self, df)
github geopandas / geopandas / tests / test_geodataframe.py
def test_from_postgis_default(self):
        con = connect('test_geopandas')
        if con is None or not create_db(self.df):
            raise unittest.case.SkipTest()

        try:
            sql = "SELECT * FROM nybb;"
            df = GeoDataFrame.from_postgis(sql, con)
        finally:
            if PANDAS_NEW_SQL_API:
                # It's not really a connection, it's an engine
                con = con.connect()
            con.close()

        validate_boro_df(self, df)
github OpenDataAnalytics / gaia / gaia / geo / geo_inputs.py
def df_from_postgis(engine, query, params, geocolumn, epsg):
    """
    Run a PostGIS query and return results as a GeoDataFrame

    :param engine: SQLAlchemy database connection engine
    :param query: Query to run
    :param params: Query parameter list
    :param geocolumn: Geometry column of query
    :param epsg: EPSG code of geometry output
    :return: GeoDataFrame
    """
    data = geopandas.GeoDataFrame.from_postgis(
        query,
        engine,
        geom_col=geocolumn,
        crs={'init': 'epsg:{}'.format(epsg)},
        params=params)
    return data
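A usage sketch for the wrapper above, assuming a SQLAlchemy engine and a hypothetical parcels table; the database URL, query, parameters, and EPSG code are placeholders, not part of the original project.

from sqlalchemy import create_engine

# Hypothetical database URL, table, and query -- adjust for your own PostGIS instance.
engine = create_engine("postgresql://user:password@localhost:5432/gisdb")
query = "SELECT name, geom FROM parcels WHERE region = %(region)s"

# params is forwarded to the underlying SQL driver; the result is tagged with EPSG:4326 as its CRS.
parcels = df_from_postgis(engine, query, {"region": "north"}, "geom", 4326)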
github akrherz / iem / htdocs / plotting / auto / scripts / p89.py
def get_data(ctx):
    """Do the processing work, please"""
    pgconn = get_dbconn("postgis")
    states = gpd.GeoDataFrame.from_postgis(
        """
    SELECT the_geom, state_abbr from states where state_abbr = %s
    """,
        pgconn,
        params=(ctx["state"],),
        index_col="state_abbr",
        geom_col="the_geom",
    )
    if states.empty:
        raise NoDataFound("No data was found.")

    with ncopen(iemre.get_daily_ncname(ctx["year"])) as nc:
        precip = nc.variables["p01d"]
        czs = CachingZonalStats(iemre.AFFINE)
        hasdata = np.zeros(
            (nc.dimensions["lat"].size, nc.dimensions["lon"].size)
github ESA-PhiLab / OpenSarToolkit / ost / s1 / refine.py
'swathidentifier', 'ingestiondate',
                        'sensoroperationalmode', 'geometry']

        out_frame = gpd.read_file(inputfile)
        out_frame.columns = column_names

    elif inputfile[-7:] == '.sqlite':
        print(' INFO: Importing Sentinel-1 inventory data from spatialite'
              ' DB file:\n {}'.format(inputfile))
        # needs to be added
    else:
        print(' INFO: Importing Sentinel-1 inventory data from PostgreSQL DB'
              ' table:\n {}'.format(inputfile))
        db_connect = pgHandler()
        sql = 'select * from {}'.format(inputfile)
        out_frame = gpd.GeoDataFrame.from_postgis(sql, db_connect.connection,
                                                  geom_col='geometry')

    if len(out_frame) > 0:
        print(' INFO: Successfully converted inventory data into a'
              ' GeoPandas GeoDataFrame.')

    return out_frame
github akrherz / iem / scripts / climodat / compute_0000.py
nc.variables["high_tmpk_12z"][idx, :, :], "K"
        ).value("F")
        low = temperature(nc.variables["low_tmpk_12z"][idx, :, :], "K").value(
            "F"
        )
        precip = distance(nc.variables["p01d_12z"][idx, :, :], "MM").value(
            "IN"
        )
        snow = distance(nc.variables["snow_12z"][idx, :, :], "MM").value("IN")
        snowd = distance(nc.variables["snowd_12z"][idx, :, :], "MM").value(
            "IN"
        )

    # build out the state mappers
    pgconn = get_dbconn("postgis")
    states = gpd.GeoDataFrame.from_postgis(
        """
        SELECT the_geom, state_abbr from states
        where state_abbr not in ('AK', 'HI', 'DC')
    """,
        pgconn,
        index_col="state_abbr",
        geom_col="the_geom",
    )
    czs = CachingZonalStats(iemre.AFFINE)
    sthigh = czs.gen_stats(np.flipud(high), states["the_geom"])
    stlow = czs.gen_stats(np.flipud(low), states["the_geom"])
    stprecip = czs.gen_stats(np.flipud(precip), states["the_geom"])
    stsnow = czs.gen_stats(np.flipud(snow), states["the_geom"])
    stsnowd = czs.gen_stats(np.flipud(snowd), states["the_geom"])

    statedata = {}
github akrherz / iem / scripts / iemre / init_dailyc.py
def compute_hasdata():
    """Compute the has_data grid"""
    nc = ncopen(iemre.get_dailyc_ncname(), "a", timeout=300)
    czs = CachingZonalStats(iemre.AFFINE)
    pgconn = get_dbconn("postgis")
    states = gpd.GeoDataFrame.from_postgis(
        "SELECT the_geom, state_abbr from state "
        "where state_abbr not in ('AK', 'HI')",
        pgconn,
        index_col="state_abbr",
        geom_col="the_geom",
    )
    data = np.flipud(nc.variables["hasdata"][:, :])
    czs.gen_stats(data, states["the_geom"])
    for nav in czs.gridnav:
        grid = np.ones((nav.ysz, nav.xsz))
        grid[nav.mask] = 0.0
        yslice = slice(nav.y0, nav.y0 + nav.ysz)
        xslice = slice(nav.x0, nav.x0 + nav.xsz)
        data[yslice, xslice] = np.where(grid > 0, 1, data[yslice, xslice])
    nc.variables["hasdata"][:, :] = np.flipud(data)
    nc.close()
github akrherz / iem / scripts / climodat / compute_0000.py
stsnow = czs.gen_stats(np.flipud(snow), states["the_geom"])
    stsnowd = czs.gen_stats(np.flipud(snowd), states["the_geom"])

    statedata = {}
    for i, state in enumerate(states.index.values):
        statedata[state] = dict(
            high=sthigh[i],
            low=stlow[i],
            precip=stprecip[i],
            snow=stsnow[i],
            snowd=stsnowd[i],
        )
        update_database(state + "0000", valid, statedata[state])

    # build out climate division mappers
    climdiv = gpd.GeoDataFrame.from_postgis(
        """
        SELECT geom, iemid from climdiv
        where st_abbrv not in ('AK', 'HI', 'DC')
    """,
        pgconn,
        index_col="iemid",
        geom_col="geom",
    )
    czs = CachingZonalStats(iemre.AFFINE)
    sthigh = czs.gen_stats(np.flipud(high), climdiv["geom"])
    stlow = czs.gen_stats(np.flipud(low), climdiv["geom"])
    stprecip = czs.gen_stats(np.flipud(precip), climdiv["geom"])
    stsnow = czs.gen_stats(np.flipud(snow), climdiv["geom"])
    stsnowd = czs.gen_stats(np.flipud(snowd), climdiv["geom"])

    for i, iemid in enumerate(climdiv.index.values):