# (extraction artifact — advertising banner, kept as a comment)
# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def __init__(self, projection, scale):
    """Initialise a projector from a pyproj projection spec and a scale.

    Raises ImportError when the optional ``pyproj`` dependency was not
    imported at module load time.
    """
    # pyproj is optional for this module — fail loudly when missing.
    if 'pyproj' not in globals():
        raise ImportError('No module named pyproj')
    self.proj = pyproj.Proj(projection)
    # Normalise the scale to a float up front.
    self.scale = float(scale)
class PSF_Object():
    """Lightweight record for a node drawn on the map canvas."""

    # Objects of this class always represent graph nodes.
    type = 'node'

    def __init__(self, id, label_id, x, y):
        """Store canvas item ids and the initial canvas position."""
        self.id = id
        self.label_id = label_id
        # Canvas (pixel) coordinates.
        self.x = x
        self.y = y
        # Geographic coordinates start at the origin until projected.
        self.longitude = 0
        self.latitude = 0
class Map(tk.Canvas):
    """Interactive map canvas supporting several pyproj projections."""

    # Named projections selectable for rendering.
    projections = {
        'Mercator': pyproj.Proj(init="epsg:3395"),
        'Azimuthal orthographic': pyproj.Proj('+proj=ortho +lon_0=28 +lat_0=47')
    }

    # Node radius in pixels.
    size = 10

    def __init__(self, controller):
        """Create the 1300x800 white canvas and reset all interaction state."""
        super().__init__(controller, bg='white', width=1300, height=800)
        self.controller = controller
        # Maps a canvas item id back to its node object.
        self.node_id_to_node = {}
        # Drag-and-drop bookkeeping.
        self.drag_item = None
        self.start_position = [None, None]
        self.start_pos_main_node = [None, None]
        self.dict_start_position = {}
        # Multi-selection of nodes.
        self.selected_nodes = set()
        # Path of the last loaded file, if any.
        self.filepath = None
        # Name of the active projection (key into ``projections``).
        self.proj = 'Mercator'

    def getProjFunc(self, epsg):
        """Return a pyproj projection for the given EPSG code string."""
        return pyproj.Proj(init='EPSG:' + epsg)
# NOTE(review): fragment of a CLI command body (uses `click`, `fiona`, `CRS`,
# `Proj` from out-of-view imports); the enclosing function definition is not
# in this chunk. Indentation reconstructed from the visible control flow.
# Resolve the template CRS: explicit string > geographic default > error.
if template_crs:
    template_crs = CRS.from_string(template_crs)
elif is_geographic(template_ds, template_varname):
    # Geographic template with no explicit CRS: assume WGS84.
    template_crs = CRS({'init': 'EPSG:4326'})
else:
    raise click.UsageError('template dataset must have a valid projection defined')
# The last two dimensions are taken as the spatial (y, x) axes — TODO confirm
# against the template dataset's conventions.
spatial_dimensions = template_variable.dimensions[-2:]
mask_shape = template_variable.shape[-2:]
template_y_name, template_x_name = spatial_dimensions
# Build coordinate variables matching the template grid and its projection.
coords = SpatialCoordinateVariables.from_dataset(
    template_ds,
    x_name=template_x_name,
    y_name=template_y_name,
    projection=Proj(**template_crs.to_dict())
)
with fiona.open(input, 'r') as shp:
    # Reprojection is only needed when shapefile and template CRS differ.
    transform_required = CRS(shp.crs) != template_crs
    # Project bbox for filtering
    bbox = coords.bbox
    if transform_required:
        # Densify the box edges (21 points) so projected bounds stay accurate.
        bbox = bbox.project(Proj(**shp.crs), edge_points=21)
    geometries = []
    for f in shp.filter(bbox=bbox.as_list()):
        geom = f['geometry']
        if transform_required:
            geom = transform_geom(shp.crs, template_crs, geom)
# NOTE(review): fragment from the middle of a UTM/projection setup routine.
# The enclosing function, the `if` that the bare `else:` below pairs with
# (presumably `if(HAS_GDAL):`), and the original indentation are outside
# this chunk, so the lines are kept verbatim as extracted.
# set projection info
ogrerr = utm_cs.ImportFromEPSG(epsg)
if ogrerr != OGRERR_NONE:
raise Exception("GDAL/osgeo ogr error code: {}".format(ogrerr))
# get utm zone (for information) if applicable
utm_zone = utm_cs.GetUTMZone()
# Whilst some projections e.g. Geoscience Australia Lambert (epsg3112) do
# not yield UTM zones, they provide eastings and northings for the whole
# Australian region. We therefore set UTM zones, only when a valid UTM zone
# is available
if(utm_zone>0):
# set projection info
# NOTE(review): inside this branch `utm_zone > 0` is always True, so the
# hemisphere flag passed to SetUTM is always "northern" — confirm whether
# negative (southern) zones should also be handled here.
utm_cs.SetUTM(abs(utm_zone), utm_zone > 0)
else:
# No valid UTM zone available: fall back to a plain pyproj CRS by EPSG code.
pp = pyproj.Proj('+init=EPSG:%d'%(epsg))
# end if
else:
# Presumably the no-GDAL path: derive the zone without osgeo.
if utm_zone is not None:
# get zone number and is_northern from utm_zone string
zone_number = int(utm_zone[0:-1])
# NOTE(review): `> 'n'` looks like a typo for `== 'n'`; as written a
# trailing 's' (southern) compares greater than 'n' and is treated as
# northern — verify against callers before relying on this flag.
is_northern = True if utm_zone[-1].lower() > 'n' else False
else:
# get centre point and get zone from that
latc = (np.nanmax(lat) + np.nanmin(lat)) / 2.
lonc = (np.nanmax(lon) + np.nanmin(lon)) / 2.
zone_number, is_northern, utm_zone = get_utm_zone(latc, lonc)
# set projection info
if(HAS_GDAL):
utm_cs.SetUTM(zone_number, is_northern)
else:
f (h5pyd.File): A HDF5 "file" used to access the WIND Toolkit data.
lat_index (float): Latitude coordinate for which dataset indices
are to be found (degrees).
lon_index (float): Longitude coordinate for which dataset indices
are to be found (degrees).
Returns:
tuple: A tuple containing the Lat/Lon coordinate indices of
interest in the WIND Toolkit dataset.
"""
# NOTE(review): the `def` line and the docstring opening are outside this
# chunk; the body below maps a (lat, lon) point onto grid indices.
dset_coords = f["coordinates"]
# Lambert conformal conic projection parameters for the dataset grid.
projstring = """+proj=lcc +lat_1=30 +lat_2=60
+lat_0=38.47240422490422 +lon_0=-96.0
+x_0=0 +y_0=0 +ellps=sphere
+units=m +no_defs """
projectLcc = Proj(projstring)
# Stored coordinates appear to be (lat, lon); reverse to (lon, lat) for Proj.
origin_ll = reversed(dset_coords[0][0]) # Grab origin directly from database
origin = projectLcc(*origin_ll)
# Project the query point into the same LCC coordinate system.
coords = (lon_index, lat_index)
coords = projectLcc(*coords)
# Offset from the origin in metres, then divide by the 2 km grid spacing.
delta = np.subtract(coords, origin)
ij = [int(round(x / 2000)) for x in delta]
# Reverse back so the result is (lat index, lon index) order.
return tuple(reversed(ij))
def init_mercator(self):
    """
    Set up the spherical Mercator projection for this dataset.

    Builds a pyproj Mercator projection on a sphere of radius 6371200 m
    with the true-scale latitude taken from ``metadata['latin']``, and
    stores the projected coordinates of the first grid corner.
    """
    meta = self.metadata
    # Spherical earth model: semi-major and semi-minor axes are equal.
    meta["proj"] = pyproj.Proj(
        proj="merc",
        lat_ts=meta["latin"],
        x_0=0,
        y_0=0,
        a=6371200.0,
        b=6371200.0,
    )
    # Project the first corner and record it in the metadata.
    x0, y0 = meta["proj"](meta["lon1"], meta["lat1"])
    meta["x0"] = x0
    meta["y0"] = y0
    # Project the opposite corner as well.
    x1, y1 = meta["proj"](meta["lon2"], meta["lat2"])
def __init__(self, affine, shape, mask=None, nodata=None,
             crs=pyproj.Proj('+init=epsg:4326'),
             y_coord_ix=0, x_coord_ix=1):
    """Initialise the grid, defaulting to a degenerate affine when none given.

    The CRS defaults to WGS84 (EPSG:4326); all other arguments are passed
    through unchanged to the parent initialiser.
    """
    # Fall back to an all-zero affine transform when the caller omits one.
    self.affine = affine if affine is not None else Affine(0, 0, 0, 0, 0, 0)
    super().__init__(shape=shape, mask=mask, nodata=nodata, crs=crs,
                     y_coord_ix=y_coord_ix, x_coord_ix=x_coord_ix)
approach.
"""
# NOTE(review): the two lines above are the tail of a module docstring whose
# opening is outside this chunk; what follows is a gallery-style example
# script that is truncated mid-way (train/test split continues out of view).
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import pyproj
import numpy as np
import verde as vd

# We'll test this on the air temperature data from Texas
data = vd.datasets.fetch_texas_wind()
coordinates = (data.longitude.values, data.latitude.values)
region = vd.get_region(coordinates)
# Use a Mercator projection for our Cartesian gridder
projection = pyproj.Proj(proj="merc", lat_ts=data.latitude.mean())
# The output grid spacing will be 15 arc-minutes
spacing = 15 / 60
# Now we can chain a blocked mean and spline together. The Spline can be regularized
# by setting the damping coefficient (should be positive). It's also a good idea to set
# the minimum distance to the average data spacing to avoid singularities in the spline.
# The degree spacing is converted to ~metres via 111 km per degree for BlockReduce.
chain = vd.Chain(
    [
        ("mean", vd.BlockReduce(np.mean, spacing=spacing * 111e3)),
        ("spline", vd.Spline(damping=1e-10, mindist=100e3)),
    ]
)
print(chain)
# We can evaluate model performance by splitting the data into a training and testing
def transform(epsg_in, epsg_out, x, y):
    """Reproject point coordinates from one CRS to another.

    Args:
        epsg_in: Source CRS identifier, e.g. ``'epsg:4326'``.
        epsg_out: Target CRS identifier, e.g. ``'epsg:3857'``.
        x, y: Scalar or array-like coordinates in the source CRS
            (x/easting/longitude first, then y/northing/latitude).

    Returns:
        Tuple ``(xx, yy)`` of the coordinates in the target CRS.
    """
    # pyproj 2 deprecated (and pyproj 3 removed) Proj(init=...) and
    # pyproj.transform(); Transformer is the supported API. always_xy=True
    # preserves the historical (x, y) axis order the old code relied on.
    transformer = proj.Transformer.from_crs(epsg_in, epsg_out, always_xy=True)
    xx, yy = transformer.transform(x, y)
    return xx, yy