for station, row in df.iterrows():
    if station not in ctx["nt"].sts:
        continue
    df.at[station, "lat"] = ctx["nt"].sts[station]["lat"]
    df.at[station, "lon"] = ctx["nt"].sts[station]["lon"]
    if varname == "precip":
        last_wet = row["last_wet"]
        days = 0 if last_wet in [today, yesterday] else row["precip_days"]
    else:
        days = row[varname + "_days"]
    df.at[station, "val"] = days
    df.at[station, "color"] = "#FF0000" if days > 0 else "#0000FF"
    df.at[station, "label"] = station[1:]
df = df[pd.notnull(df["lon"])]
ctx["df"] = gpd.GeoDataFrame(
    df, geometry=gpd.points_from_xy(df["lon"], df["lat"])
)
ctx["subtitle"] = (
    "based on NWS CLI Sites, map approximately valid for %s"
) % (today.strftime("%-d %b %Y"),)
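# The GeoDataFrame above carries no CRS. If one is needed downstream, plain
# WGS84 lon/lat can be declared explicitly (the EPSG code here is an assumption
# about the source data, not part of the original snippet):
ctx["df"] = ctx["df"].set_crs("EPSG:4326")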
model_hydro = gens_860.loc[
    gens_860["technology_description"] == "Conventional Hydroelectric"
].merge(plant_entity[["plant_id_eia", "latitude", "longitude"]], on="plant_id_eia")
no_lat_lon = model_hydro.loc[
    (model_hydro["latitude"].isnull()) | (model_hydro["longitude"].isnull()), :
]
if not no_lat_lon.empty:
    print(no_lat_lon["summer_capacity_mw"].sum(), " MW without lat/lon")
model_hydro = model_hydro.dropna(subset=["latitude", "longitude"])
# Convert the lon/lat values to geo points. Need to add an initial CRS and then
# change it to align with the IPM regions
model_hydro_gdf = gpd.GeoDataFrame(
    model_hydro,
    geometry=gpd.points_from_xy(model_hydro.longitude, model_hydro.latitude),
    crs={"init": "epsg:4326"},
)
if model_hydro_gdf.crs != model_regions_gdf.crs:
    model_hydro_gdf = model_hydro_gdf.to_crs(model_regions_gdf.crs)
model_hydro_gdf = gpd.sjoin(model_regions_gdf, model_hydro_gdf)
model_hydro_gdf = model_hydro_gdf.rename(columns={"IPM_Region": "region"})
keep_cols = ["plant_id_eia", "region"]
return model_hydro_gdf.loc[:, keep_cols]
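# Note: the ``crs={"init": "epsg:4326"}`` form used above is deprecated in recent
# pyproj/geopandas releases. A drop-in sketch of the same construction with the
# modern authority-string spelling would be:
# model_hydro_gdf = gpd.GeoDataFrame(
#     model_hydro,
#     geometry=gpd.points_from_xy(model_hydro.longitude, model_hydro.latitude),
#     crs="EPSG:4326",
# )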
df = pd.DataFrame(
    {'City': ['Buenos Aires', 'Brasilia', 'Santiago', 'Bogota', 'Caracas'],
     'Country': ['Argentina', 'Brazil', 'Chile', 'Colombia', 'Venezuela'],
     'Latitude': [-34.58, -15.78, -33.45, 4.60, 10.48],
     'Longitude': [-58.66, -47.91, -70.66, -74.08, -66.86]})
###############################################################################
# A ``GeoDataFrame`` needs a ``shapely`` object. We use geopandas
# ``points_from_xy()`` to transform **Longitude** and **Latitude** into a list
# of ``shapely.Point`` objects and set it as a ``geometry`` while creating the
# ``GeoDataFrame``. (note that ``points_from_xy()`` is an enhanced wrapper for
# ``[Point(x, y) for x, y in zip(df.Longitude, df.Latitude)]``)
gdf = geopandas.GeoDataFrame(
    df, geometry=geopandas.points_from_xy(df.Longitude, df.Latitude))
###############################################################################
# ``gdf`` looks like this:
print(gdf.head())
###############################################################################
# Finally, we plot the coordinates over a country-level map.
world = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'))
# We restrict to South America.
ax = world[world.continent == 'South America'].plot(
    color='white', edgecolor='black')
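###############################################################################
# We can now plot our ``GeoDataFrame`` on top of the basemap (this final call is
# added here to complete the plotting step described above).
gdf.plot(ax=ax, color='red')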
        bnds[1] - buf,
        bnds[0] - buf,
        bnds[3] + buf,
        bnds[2] + buf,
        bnds[3] + buf,
        bnds[2] + buf,
        bnds[1] - buf,
        bnds[0] - buf,
        bnds[1] - buf,
        ctx["day"],
    ),
)
if df.empty:
    raise NoDataFound("No Data Found.")
df = gpd.GeoDataFrame(
    df, geometry=gpd.points_from_xy(df["lon"], df["lat"])
)
return df[pd.notnull(df[ctx["v"]])]
logger.warning(
    "Some generators do not have lon/lat data. Check the source "
    "file to determine if they should be included in results. "
    f"\nThe affected generators account for {no_lat_lon_cap} in balancing "
    "authorities: "
    f"\n{no_lat_lon['balancing_authority_code'].tolist()}"
)
planned = planned.dropna(subset=["latitude", "longitude"])
# Convert the lon/lat values to geo points. Need to add an initial CRS and then
# change it to align with the IPM regions
print("Creating gdf")
planned_gdf = gpd.GeoDataFrame(
    planned.copy(),
    geometry=gpd.points_from_xy(planned.longitude.copy(), planned.latitude.copy()),
    crs={"init": "epsg:4326"},
)
# planned_gdf.crs = {"init": "epsg:4326"}
if planned_gdf.crs != model_regions_gdf.crs:
    planned_gdf = planned_gdf.to_crs(model_regions_gdf.crs)
planned_gdf = gpd.sjoin(model_regions_gdf.drop(columns="IPM_Region"), planned_gdf)
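# Note: with no ``how``/``predicate`` arguments, ``gpd.sjoin`` defaults to an inner
# join on the "intersects" predicate, so planned plants that fall outside every
# model region are dropped by the join above.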
# Add planned additions from the settings file
if settings["additional_planned"]:
    i = 0
    for record in settings["additional_planned"]:
        plant_id, gen_id, model_region = record
        plant_record = planned.loc[
            (planned["plant_id_eia"] == plant_id)
            & (planned["generator_id"] == gen_id),
def to_geodataframe(self):
    gdf = gpd.GeoDataFrame(
        self.copy(),
        geometry=gpd.points_from_xy(self[constants.LONGITUDE], self[constants.LATITUDE]),
        crs=self._crs,
    )
    return gdf
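# Hypothetical usage sketch (the variable name and target CRS are assumptions):
# for an instance ``tdf`` of this class carrying the expected latitude/longitude
# columns and a ``_crs`` attribute, conversion plus reprojection could look like
# gdf = tdf.to_geodataframe().to_crs("EPSG:3857")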
a = df[[origin_lat, origin_lng]].rename(columns={origin_lat: 'lat', origin_lng: 'lng'})
b = df[[destination_lat, destination_lng]].rename(
    columns={destination_lat: 'lat', destination_lng: 'lng'})
# drop_duplicates must be applied now, on the lat/lng columns, because GeoPandas
# does not support dropping duplicate geometries directly
points = pd.concat([a, b]).drop_duplicates(['lat', 'lng'])
points = gpd.GeoDataFrame(geometry=gpd.points_from_xy(points['lng'], points['lat']),
                          crs=constants.DEFAULT_CRS)
tessellation = tiler.get('voronoi', points=points)
# Step 2: map origin and destination points into the tessellation
gdf_origin = gpd.GeoDataFrame(df.copy(),
                              geometry=gpd.points_from_xy(df[origin_lng], df[origin_lat]),
                              crs=tessellation.crs)
gdf_destination = gpd.GeoDataFrame(df.copy(),
                                   geometry=gpd.points_from_xy(df[destination_lng], df[destination_lat]),
                                   crs=tessellation.crs)
if all(isinstance(x, Polygon) for x in tessellation.geometry):
    if remove_na:
        how = 'inner'
    else:
        how = 'left'
    origin_join = gpd.sjoin(gdf_origin, tessellation, how=how, op='within').drop("geometry", axis=1)
    destination_join = gpd.sjoin(gdf_destination, tessellation, how=how, op='within').drop(
        "geometry", axis=1)
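# Compatibility sketch: geopandas 0.10+ renamed the ``op=`` keyword of ``sjoin`` to
# ``predicate=``; on newer releases the joins above would read, for example,
# origin_join = gpd.sjoin(gdf_origin, tessellation, how=how, predicate='within').drop(
#     "geometry", axis=1)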
if len(real_first_matches) == 2:
    false_xy.append(
        (round(l_coords[0][0], precision), round(l_coords[0][1], precision))
    )
if len(real_second_matches) == 2:
    false_xy.append(
        (round(l_coords[-1][0], precision), round(l_coords[-1][1], precision))
    )
false_unique = list(set(false_xy))
x, y = zip(*false_unique)
if GPD_08:
    points = gpd.points_from_xy(x, y).buffer(tolerance)
else:
    points = gpd.GeoSeries(gpd.points_from_xy(x, y)).buffer(tolerance)
geoms = streets
idx = max(geoms.index) + 1
for x, y, point in tqdm(
    zip(x, y, points), desc="Merging segments", total=len(x), disable=not verbose
):
    if GPD_08:
        predic = geoms.sindex.query(point, predicate="intersects")
        matches = geoms.iloc[predic].geometry
    else:
        pos = list(geoms.sindex.intersection(point.bounds))
        mat = geoms.iloc[pos]
        matches = mat[mat.intersects(point)].geometry
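# The GPD_08 flag above presumably gates on geopandas >= 0.8, where the spatial
# index gained ``query(geometry, predicate=...)``; older releases fall back to a
# bounding-box ``intersection`` query followed by an explicit ``intersects`` filter.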