As a follow-up to a question I posted over a year ago, Buffer miles in decimal degrees with fiona and shapely, I am attempting to create 50-mile buffers around each earthquake. Following AndreJ's advice, I create a custom transverse Mercator projection centered on each earthquake, buffer the point by 50 miles (or whatever distance I want) in that custom CRS, and then reproject the result back to EPSG:4326.
The code runs and, for the most part, does a solid job of buffering each earthquake in the custom SRID and reprojecting it back to 4326, but there are some noticeable errors:
- South America and some other parts of the world do not have buffers around their points.
Here is the custom CRS string in my code:
cs="+proj=tmerc +lat_0={0} +lon_0={1} +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs".format(y,x)
For each earthquake, lat_0 and lon_0 are replaced with the point's y and x, as the hypothetical example below shows. I am posting my whole code at the bottom in case somebody wants to run it and play with it themselves; if it seems unnecessary, I will remove it.
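A concrete illustration of the substitution (the epicenter coordinates here are made up purely for illustration):

x, y = -117.5, 35.7   # hypothetical longitude, latitude of one earthquake
cs = "+proj=tmerc +lat_0={0} +lon_0={1} +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs".format(y, x)
# cs is now:
# "+proj=tmerc +lat_0=35.7 +lon_0=-117.5 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs"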
import urllib2
import json
import fiona
from shapely.geometry import Point, mapping, Polygon, shape
from fiona.crs import from_epsg, from_string
from fiona.transform import transform_geom, transform

def EarthQuakeBuffers(shpname, USGSurl, dist):
    crs = from_epsg(4326)
    schema = {'geometry': 'Polygon', 'properties': {'Place': 'str', 'KM': 'int', 'Bearing': 'str', 'Magnitude': 'str'}}
    with fiona.open(shpname, "w", "ESRI Shapefile", schema, crs) as output:
        url = USGSurl
        weburl = urllib2.urlopen(url)
        if weburl.getcode() == 200:
            data = json.loads(weburl.read())
            for i in data["features"]:
                # parse the distance (km) and bearing out of the USGS "place" string,
                # e.g. "10km SSW of Somewhere, CA"
                place = ""
                mag = i["properties"]["mag"]
                p1 = i["properties"]["place"].split()
                p2 = ''.join(c for c in p1[0] if c not in 'km')
                km = ''.join(z for z in p2 if z.isdigit())
                bearing = ''.join(b for b in p1[1] if b in ('S','SSE','SE','ESE','E','ENE','NE','NNE','N','NNW','NW','WNW','W','WSW','SW','SSW'))
                if type(km) == str: km = None
                else: km = int(str(km))
                if type(bearing) == str: bearing = None
                if bearing == None and km == None: place = i["properties"]["place"]
                else: place = ' '.join(i["properties"]["place"].split()[3:])
                print place, mag, km, bearing
                # custom transverse Mercator centered on this earthquake
                x, y = float(i["geometry"]["coordinates"][0]), float(i["geometry"]["coordinates"][1])
                cs = "+proj=tmerc +lat_0={0} +lon_0={1} +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs".format(y, x)
                cs_custom = from_string(cs)
                # project the point into the custom CRS, buffer in meters,
                # then reproject the polygon back to EPSG:4326
                x2, y2 = transform(crs, cs_custom, [y], [x])
                point = Point(x2[0], y2[0])
                poly = point.buffer(dist)
                poly2 = transform_geom(cs_custom, crs, mapping(poly))
                output.write({'properties': {'Place': place, 'Magnitude': mag, 'KM': km, 'Bearing': bearing},
                              'geometry': mapping(shape(poly2))})

def EarthQuakePoints(shpname, USGSurl):
    crs = from_epsg(4326)
    schema = {'geometry': 'Point', 'properties': {'Place': 'str', 'KM': 'int', 'Bearing': 'str', 'Magnitude': 'str'}}
    with fiona.open(shpname, "w", "ESRI Shapefile", schema, crs) as output:
        url = USGSurl
        weburl = urllib2.urlopen(url)
        if weburl.getcode() == 200:
            data = json.loads(weburl.read())
            for i in data["features"]:
                place = ""
                mag = i["properties"]["mag"]
                p1 = i["properties"]["place"].split()
                p2 = ''.join(c for c in p1[0] if c not in 'km')
                km = ''.join(z for z in p2 if z.isdigit())
                bearing = ''.join(b for b in p1[1] if b in ('S','SSE','SE','ESE','E','ENE','NE','NNE','N','NNW','NW','WNW','W','WSW','SW','SSW'))
                if type(km) == str: km = None
                else: km = int(str(km))
                if type(bearing) == str: bearing = None
                if bearing == None and km == None: place = i["properties"]["place"]
                else: place = ' '.join(i["properties"]["place"].split()[3:])
                print place, mag, km, bearing
                point = Point(float(i["geometry"]["coordinates"][0]), float(i["geometry"]["coordinates"][1]))
                output.write({'properties': {'Place': place, 'Magnitude': mag, 'KM': km, 'Bearing': bearing},
                              'geometry': mapping(point)})

points = r'path\earthquake_points.shp'
buffers = r'path\earthquake_buffers.shp'
url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_week.geojson'
EarthQuakePoints(points, url)
EarthQuakeBuffers(buffers, url, 80467.2)  # 50 miles in meters
So my question is: why are some buffers getting left out or mapped incorrectly? Is there a better way to go about creating a custom CRS given a latitude/longitude?
Answer
The ArcMap warning message suggests using conventional SRSs rather than custom ones in order to avoid unexpected results, so I would calculate the UTM zone in which each earthquake falls and then use the existing UTM SRS. Here is a code snippet that implements it:
from utm import latlon_to_zone_number

zone_number = latlon_to_zone_number(lat, lon)
if lat > 0:
    # northern hemisphere
    cs = "+proj=utm +zone={} +datum=WGS84 +units=m +no_defs".format(zone_number)
else:
    # southern hemisphere
    cs = "+proj=utm +zone={} +south +datum=WGS84 +units=m +no_defs".format(zone_number)
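If you prefer not to add the utm dependency, the zone number can also be derived directly from the longitude with the standard 6-degree formula. This is a simplified sketch: it ignores the special grid-zone exceptions around Norway and Svalbard, which hardly matter when the zone is only used to pick a metric CRS for buffering.

def utm_zone_number(lon):
    # Standard UTM zones are 6 degrees wide, numbered 1-60 starting at 180 W.
    # Simplified: the Norway/Svalbard exceptions are not handled.
    return int((lon + 180.0) % 360 // 6) + 1

# utm_zone_number(-117.5) -> 11, utm_zone_number(3.0) -> 31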
UPDATE: Here is the fixed version of your script (there was a coordinate inversion before creating the points). It also works with the custom SRSs:
import urllib2
import json
import fiona
from shapely.geometry import Point, mapping, Polygon, shape
from fiona.crs import from_epsg, from_string
from fiona.transform import transform_geom, transform
#from utm import latlon_to_zone_number

def EarthQuakeBuffers(shpname, USGSurl, dist):
    crs = from_epsg(4326)
    schema = {'geometry': 'Polygon', 'properties': {'Place': 'str', 'KM': 'int', 'Bearing': 'str', 'Magnitude': 'str'}}
    with fiona.open(shpname, "w", "ESRI Shapefile", schema, crs) as output:
        url = USGSurl
        weburl = urllib2.urlopen(url)
        if weburl.getcode() == 200:
            data = json.loads(weburl.read())
            for i in data["features"]:
                place = ""
                mag = i["properties"]["mag"]
                p1 = i["properties"]["place"].split()
                p2 = ''.join(c for c in p1[0] if c not in 'km')
                km = ''.join(z for z in p2 if z.isdigit())
                bearing = ''.join(b for b in p1[1] if b in ('S','SSE','SE','ESE','E','ENE','NE','NNE','N','NNW','NW','WNW','W','WSW','SW','SSW'))
                if type(km) == str: km = None
                else: km = int(str(km))
                if type(bearing) == str: bearing = None
                if bearing == None and km == None: place = i["properties"]["place"]
                else: place = ' '.join(i["properties"]["place"].split()[3:])
                print place, mag, km, bearing
                x, y = float(i["geometry"]["coordinates"][0]), float(i["geometry"]["coordinates"][1])
                cs = "+proj=tmerc +lat_0={0} +lon_0={1} +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs".format(y, x)
                #zone_number = latlon_to_zone_number(y, x)
                #if y > 0:
                #    # northern hemisphere
                #    cs = "+proj=utm +zone={} +datum=WGS84 +units=m +no_defs".format(zone_number)
                #else:
                #    # southern hemisphere
                #    cs = "+proj=utm +zone={} +south +datum=WGS84 +units=m +no_defs".format(zone_number)
                cs_custom = from_string(cs)
                #x2,y2 = transform(crs, cs_custom, [y], [x]) #ERROR
                x2, y2 = transform(crs, cs_custom, [x], [y])
                point = Point(x2[0], y2[0])
                poly = point.buffer(dist)
                poly2 = transform_geom(cs_custom, crs, mapping(poly))
                output.write({'properties': {'Place': place, 'Magnitude': mag, 'KM': km, 'Bearing': bearing},
                              'geometry': mapping(shape(poly2))})

def EarthQuakePoints(shpname, USGSurl):
    crs = from_epsg(4326)
    schema = {'geometry': 'Point', 'properties': {'Place': 'str', 'KM': 'int', 'Bearing': 'str', 'Magnitude': 'str'}}
    with fiona.open(shpname, "w", "ESRI Shapefile", schema, crs) as output:
        url = USGSurl
        weburl = urllib2.urlopen(url)
        if weburl.getcode() == 200:
            data = json.loads(weburl.read())
            for i in data["features"]:
                place = ""
                mag = i["properties"]["mag"]
                p1 = i["properties"]["place"].split()
                p2 = ''.join(c for c in p1[0] if c not in 'km')
                km = ''.join(z for z in p2 if z.isdigit())
                bearing = ''.join(b for b in p1[1] if b in ('S','SSE','SE','ESE','E','ENE','NE','NNE','N','NNW','NW','WNW','W','WSW','SW','SSW'))
                if type(km) == str: km = None
                else: km = int(str(km))
                if type(bearing) == str: bearing = None
                if bearing == None and km == None: place = i["properties"]["place"]
                else: place = ' '.join(i["properties"]["place"].split()[3:])
                print place, mag, km, bearing
                point = Point(float(i["geometry"]["coordinates"][0]), float(i["geometry"]["coordinates"][1]))
                output.write({'properties': {'Place': place, 'Magnitude': mag, 'KM': km, 'Bearing': bearing},
                              'geometry': mapping(point)})

points = r'path\earthquake_points.shp'
buffers = r'path\earthquake_buffers.shp'
url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_week.geojson'
EarthQuakePoints(points, url)
EarthQuakeBuffers(buffers, url, 80467.2)  # 50 miles in meters
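For reference, fiona.transform.transform(src_crs, dst_crs, xs, ys) expects the x values (longitudes) first and the y values (latitudes) second, which is why the swapped call put many buffers far from their epicenters. A quick sanity check, sketched below with a rough degree tolerance that I am assuming is adequate away from the poles and the antimeridian, is to compare the centroid of the reprojected buffer with the original coordinates:

from shapely.geometry import shape

def buffer_is_centered(lon, lat, poly_4326, tol=0.01):
    # The buffer polygon, once back in EPSG:4326, should be centered
    # (approximately) on the original epicenter; tol is in degrees.
    c = shape(poly_4326).centroid
    return abs(c.x - lon) < tol and abs(c.y - lat) < tol

# inside the loop, right after poly2 is computed:
# if not buffer_is_centered(x, y, poly2):
#     print "buffer is off-center for", place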