From 4f1888a914fc6945c3ff15fc627456ddc2c63239 Mon Sep 17 00:00:00 2001 From: dsentinel Date: Wed, 4 Sep 2019 13:28:31 -0600 Subject: [PATCH 01/25] ready to merge --- ph5/core/cs2cs.py | 2 +- ph5/core/ph5utils.py | 25 ++++++- ph5/entry_points.py | 7 +- ph5/utilities/segd2ph5.py | 16 ++--- ph5/utilities/tests/test_utmtolatlong.py | 30 +++++++++ ph5/utilities/utmtolatlong.py | 86 ++++++++++++++++++++++++ 6 files changed, 153 insertions(+), 13 deletions(-) create mode 100644 ph5/utilities/tests/test_utmtolatlong.py create mode 100644 ph5/utilities/utmtolatlong.py diff --git a/ph5/core/cs2cs.py b/ph5/core/cs2cs.py index d641792c..868cd103 100755 --- a/ph5/core/cs2cs.py +++ b/ph5/core/cs2cs.py @@ -58,7 +58,7 @@ def utm2geod(zn, datum, X, Y, Z): ''' Convert UTM coordinates to geodetic coordinates ''' p = Proj(proj='utm', zone=zn, ellps=datum) - lon, lan = p(X, Y, inverse=True) + lon, lat = p(X, Y, inverse=True) return lat, lon, Z diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index c2c83f57..83895e87 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -15,8 +15,9 @@ from ph5.core.timedoy import epoch2passcal, passcal2epoch, TimeDOY, TimeError import time import re +from pyproj import Proj, transform -PROG_VERSION = "2019.81" +PROG_VERSION = "2019.239" class PH5Response(object): @@ -68,6 +69,28 @@ def get_n_i(self, sensor_keys, datalogger_keys): return ph5_resp.n_i +class UTMConversions: # added 2019-08-27 dthomas + + def __init__(self): + self.wgs = Proj(init="epsg:4326", proj="latlong") + + def lat_long(self, easting, northing, zone, hemisphere): + # utm to lat/long conversion + side = hemisphere.upper() + if side == "S": + ns = "south" + elif side == "N": + ns = "north" + else: + ns = None # will throw error + + mystr = "+proj=utm +zone=" + str(zone) + \ + " +" + ns + " +ellps=WGS84" + utm = Proj(mystr.strip()) + lon, lat = transform(utm, self.wgs, float(easting), float(northing)) + return (lat, lon) + + """ =============== Utility methods diff --git a/ph5/entry_points.py b/ph5/entry_points.py index b9ddad2f..b47a2b43 100644 --- a/ph5/entry_points.py +++ b/ph5/entry_points.py @@ -5,7 +5,7 @@ # Dave Thomas, 2019-08-06 -PROG_VERSION = '2019.218' +PROG_VERSION = '2019.239' class EntryPointTypes(): @@ -272,5 +272,10 @@ def __init__(self): 'ph5.clients.ph5tostationxml:main', 'Takes PH5 files and returns StationXML.', type=EntryPointTypes.CLIENT), + EntryPoint('utmtolatlong', + 'ph5.utilities.utmtolatlong:main', + 'Converts UTM coordinates to Latitudes' + ' & Longitudes.', + type=EntryPointTypes.INGESTION), ] } diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 183f3f0b..3f04a86c 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -15,10 +15,9 @@ import json import re from math import modf -from ph5.core import experiment, columns, segdreader -from pyproj import Proj, transform +from ph5.core import experiment, columns, segdreader, ph5utils -PROG_VERSION = "2019.14" +PROG_VERSION = "2019.239" MAX_PH5_BYTES = 1073741824 * 100. # 100 GB (1024 X 1024 X 1024 X 2) @@ -967,6 +966,7 @@ def txncsptolatlon(northing, easting): Convert texas state plane coordinates in feet to geographic coordinates, WGS84. 
''' + from pyproj import Proj, transform # Texas NC state plane feet Zone 4202 sp = Proj(init='epsg:32038') # WGS84, geographic @@ -995,13 +995,9 @@ def utmcsptolatlon(northing, easting): else: NS = 'north' - utmc = Proj("+proj=utm +zone="+utmzone+" +"+NS+" +ellps=WGS84") - print - # WGS84, geographic - wgs = Proj(init='epsg:4326', proj='latlong') - # - lon, lat = transform(utmc, wgs, easting, northing) - +# OLD WAY - deprecated, called pyproj. New way uses ph5utils. + utm = ph5utils.UTMConversions() + lat, lon = utm.lat_long(easting, northing, utmzone, NS) return lat, lon diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py new file mode 100644 index 00000000..99dfe69a --- /dev/null +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -0,0 +1,30 @@ +import unittest +import pyproj +from ph5.core.ph5utils import UTMConversions + + +class TestUTMconversion(unittest.TestCase): + + def test_is_valid_utm_conversion(self): + u = UTMConversions() # PIC Socorro + self.assertAlmostEqual(u.lat_long(322916, 3771967, 13, 'N'), + (34.07349577107704, -106.91909595147378)) + + def test_is_valid_utm_conversion_south(self): + u = UTMConversions() # Castle Rock Antarctica + self.assertAlmostEqual(u.lat_long(540947, 1361594, 58, 'S'), + (-77.81567398301094, 166.73816638798527)) + + def test_for_correct_type(self): + with self.assertRaises(ValueError): + u = UTMConversions() # 'heck' is not a float + u.lat_long('heck', 'no', 58, 'S') + + def test_for_correct_value(self): # 666 is invalid UTM zone + with self.assertRaises(pyproj.exceptions.CRSError): + u = UTMConversions() + u.lat_long(540947, 1361594, 666, 'S') + + +if __name__ == "__main__": + unittest.main() diff --git a/ph5/utilities/utmtolatlong.py b/ph5/utilities/utmtolatlong.py new file mode 100644 index 00000000..f442b568 --- /dev/null +++ b/ph5/utilities/utmtolatlong.py @@ -0,0 +1,86 @@ +# This script can be used to convert UTM coordinates to Latitude/Longitude +# Dave Thomas, 2019-08-01 + +from __future__ import (print_function) +from ph5.core import ph5utils +import argparse +import logging + +PROG_VERSION = '2019.239' +LOGGER = logging.getLogger(__name__) + + +def convert_file(infile, outfile): + counter = 0 + with open(outfile, "w") as theoutfile, open(infile) as theinfile: + ws = "Easting, Northing, Zone, Hemisphere, => Longitude, Latitude\n" + theoutfile.write(ws) + for line in theinfile: + counter = counter + 1 + if counter == 1: + continue # skip header line + fieldset = line.strip().split(",") + if len(fieldset) <= 1: + continue + easting = float(fieldset[0]) + northing = float(fieldset[1]) + zone = int(fieldset[2]) + hemisphere = str(fieldset[3]).upper() + try: + utm = ph5utils.UTMConversions() + lat, lon = utm.lat_long(easting, northing, zone, hemisphere) + ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" + printstr1 = (ps % (easting, northing, zone, hemisphere)) + printstr2 = "Lon = %.7f, Lat = %.7f" % (lon, lat) + print (printstr1 + " => " + printstr2) + outstr = "%.1f, %.1f, %d, %s, %.7f, %.7f" \ + % (easting, northing, zone, hemisphere, lon, lat) + theoutfile.write(outstr + "\n") + except ValueError: + print ("There was an error in UTM file conversion.") + return + + +def doinline(easting, northing, zone, hemisphere): + print ("easting=%s, northing=%s, zone=%s, hemisphere=%s" + % (easting, northing, zone, hemisphere)) + try: + utm = ph5utils.UTMConversions() + indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" + % (float(easting), float(northing), int(zone), 
hemisphere)) + print (indat) + lat, lon = utm.lat_long(float(easting), float(northing), + int(zone), hemisphere) + outstr = "Lon = %.7f, Lat = %.7f" % (lon, lat) + print (outstr) + print (''.join('*'*50)) + except ValueError: + print ("There was an error in UTM data conversion.") + return + + +def main(): + desc = 'Convert UTM locations to longitudes, latitudes, \ +in batches (-i, -o) or inline (-e,-n,-z,-s)' + parser = argparse.ArgumentParser(description=desc) + group = parser.add_mutually_exclusive_group() + group.add_argument("-i", "--infile", help="Input file with UTM \ +Easting, Northing, Zone, Hemisphere") + parser.add_argument("-o", "--outfile", help="Output file with \ +converted longitudes, latitudes") + group.add_argument("-e", "--easting", help="Enter UTM Easting") + parser.add_argument("-n", "--northing", help="Enter UTM Northing") + parser.add_argument("-z", "--zone", help="Enter UTM Zone") + parser.add_argument("-s", "--side", help="Side=Hemisphere, N or S") + + args = parser.parse_args() + + if args.infile is not None and args.outfile is not None: + convert_file(args.infile, args.outfile) + + else: # direct method + doinline(args.easting, args.northing, args.zone, args.side) + + +if __name__ == '__main__': + main() From 699ee8ee2d91ded28649724d7e6166037e9ff189 Mon Sep 17 00:00:00 2001 From: dsentinel Date: Wed, 4 Sep 2019 13:31:24 -0600 Subject: [PATCH 02/25] ready to merge --- ph5/utilities/tests/test_utmtolatlong.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index 99dfe69a..6e369cff 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -8,12 +8,12 @@ class TestUTMconversion(unittest.TestCase): def test_is_valid_utm_conversion(self): u = UTMConversions() # PIC Socorro self.assertAlmostEqual(u.lat_long(322916, 3771967, 13, 'N'), - (34.07349577107704, -106.91909595147378)) + (34.07349577107704, -106.91909595147378)) def test_is_valid_utm_conversion_south(self): u = UTMConversions() # Castle Rock Antarctica self.assertAlmostEqual(u.lat_long(540947, 1361594, 58, 'S'), - (-77.81567398301094, 166.73816638798527)) + (-77.81567398301094, 166.73816638798527)) def test_for_correct_type(self): with self.assertRaises(ValueError): From 3731a3b6602373a1c171f97002e214beb2bf4f3d Mon Sep 17 00:00:00 2001 From: dsentinel Date: Wed, 4 Sep 2019 13:32:34 -0600 Subject: [PATCH 03/25] really ready to merge this time, honest --- ph5/utilities/segd2ph5.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 3f04a86c..6758ccf3 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -988,12 +988,9 @@ def utmcsptolatlon(northing, easting): new_UTM = re.split(r'(\d+)', UTM) utmzone = str(new_UTM[1]) - if str(new_UTM[2]).upper() == 'N': - NS = 'north' - elif str(new_UTM[2]).upper() == 'S': - NS = 'south' - else: - NS = 'north' + NS = str(new_UTM[2]).upper() + if NS != 'N' and NS != 'S': + NS = 'N' # OLD WAY - deprecated, called pyproj. New way uses ph5utils. 
utm = ph5utils.UTMConversions() From 99c826c2bb92e14e22b495a68eb06e2fad8ebab9 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 2 Oct 2019 10:50:41 -0600 Subject: [PATCH 04/25] conversions consolidated into ph5utils --- ph5/core/cs2cs.py | 92 --- ph5/core/ph5utils.py | 56 +- ph5/core/segyfactory.py | 20 +- ph5/core/sfactory.py | 753 ----------------------- ph5/utilities/segd2ph5.py | 60 +- ph5/utilities/tests/test_utmtolatlong.py | 11 + 6 files changed, 79 insertions(+), 913 deletions(-) delete mode 100755 ph5/core/cs2cs.py delete mode 100755 ph5/core/sfactory.py diff --git a/ph5/core/cs2cs.py b/ph5/core/cs2cs.py deleted file mode 100755 index 868cd103..00000000 --- a/ph5/core/cs2cs.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env pnpython3 -# -# Convert geographic coordinates to UTM and back. -# Assumes reference ellipsoids are the same and are 3D. -# -# Steve Azevedo, January 2013 -# - -# https://code.google.com/p/pyproj/ -from pyproj import Proj - -# The proj4 program cs2cs must be in your path -# CS2CS = 'cs2cs' - - -def _sign(val, latlon): - ret = val - try: - nsew = str(val[0]) - nsew.upper() - if nsew == 'N' or nsew == 'S' or nsew == 'E' or nsew == 'W': - return ret - - if nsew == '+': - val = val[1:] - - if latlon == 'lat': - if nsew == '-': - ret = 'S' + val[1:] - else: - ret = 'N' + val - elif latlon == 'lon': - if nsew == '-': - ret = 'W' + val[1:] - else: - ret = 'E' + val - - except IndexError: - pass - - return ret - - -def lon2zone(lon): - ''' Get UTM zone from longitude, brute force method ''' - # zone 1 = -180 -> -174 - # zone 2 = -174 -> -168 - for zone in range(1, 60): - ebound = (zone * 6) - 180 - wbound = ebound - 6 - if lon >= wbound and lon <= ebound: - return zone - - return None - - -def utm2geod(zn, datum, X, Y, Z): - ''' Convert UTM coordinates to geodetic coordinates ''' - p = Proj(proj='utm', zone=zn, ellps=datum) - - lon, lat = p(X, Y, inverse=True) - - return lat, lon, Z - - -def geod2utm(zn, datum, lat, lon, elev): - ''' Convert geodetic coordinates to UTM ''' - if zn is None: - zn = lon2zone(lon) - - p = Proj(proj='utm', zone=zn, ellps=datum) - - X, Y = p(lon, lat) - - # Return Y, X, Z - return Y, X, elev - - -if __name__ == '__main__': - lat = 34.023786 - lon = -106.898492 - elev = 1456.0 - zone = 13 - - z = lon2zone(lon) - print z - - Y, X, Z = geod2utm(zone, 'WGS84', lat, lon, elev) - print Y, X, Z - - lat, lon, elev = utm2geod(zone, 'WGS84', X, Y, Z) - print lat, lon, elev diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 83895e87..770c4d63 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -15,7 +15,8 @@ from ph5.core.timedoy import epoch2passcal, passcal2epoch, TimeDOY, TimeError import time import re -from pyproj import Proj, transform +from pyproj import Transformer +import math PROG_VERSION = "2019.239" @@ -69,27 +70,56 @@ def get_n_i(self, sensor_keys, datalogger_keys): return ph5_resp.n_i -class UTMConversions: # added 2019-08-27 dthomas - - def __init__(self): - self.wgs = Proj(init="epsg:4326", proj="latlong") +class UTMConversions: # updated 2019-09-27 dthomas def lat_long(self, easting, northing, zone, hemisphere): # utm to lat/long conversion - side = hemisphere.upper() + epsg_wgs84 = "EPSG:4326" + side = hemisphere[0].upper() if side == "S": - ns = "south" + epsgroot = "327" elif side == "N": - ns = "north" + epsgroot = "326" else: - ns = None # will throw error + epsgroot = None # will throw error + epsg_utm = "EPSG:" + epsgroot + str(zone) - mystr = "+proj=utm +zone=" + str(zone) + \ - " +" + ns + " 
+ellps=WGS84" - utm = Proj(mystr.strip()) - lon, lat = transform(utm, self.wgs, float(easting), float(northing)) + transformer = Transformer.from_crs(epsg_utm, + epsg_wgs84, always_xy=True) + lon, lat = transformer.transform(float(easting), float(northing)) return (lat, lon) + def lon2zone(self, lon): + ''' Get UTM zone from longitude ''' + zone = int(math.floor((lon + 180.)/6.0) % 60) + 1 + return zone + + def geod2utm(self, lat, lon, elev): + # lat/long to UTM conversion + zone = self.lon2zone(lon) + epsg_wgs84 = "EPSG:4326" + if lat < 0.0: + epsgroot = "327" + elif lat >= 0.0: + epsgroot = "326" + epsg_utm = "EPSG:" + epsgroot + str(zone) + + transformer = Transformer.from_crs(epsg_wgs84, epsg_utm, + always_xy=True) + easting, northing = transformer.transform(lon, lat) + return (northing, easting, elev) # e.g. Y, X, Z + + +class TSPConversions: # added 2019-09-30 dthomas, Texas State Plane Coords + + def lat_long(self, easting, northing): + epsg_wgs84 = "EPSG:4326" + epsg_sp4202 = "EPSG:2276" # State Plane Coords in US FEET + transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, + always_xy=True) + lon, lat = transformer.transform(easting, northing) + return (lon, lat) + """ =============== diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index 066ad4fa..bdb32396 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -17,7 +17,7 @@ import sys import logging from pyproj import Geod -from ph5.core.cs2cs import geod2utm +from ph5.core import ph5utils # for geod2utm from ph5.core import segy_h, ebcdic PROG_VERSION = '2018.268' @@ -787,11 +787,10 @@ def set_trace_header(self): if self.utm is True: try: - Y, X, Z = geod2utm(None, # Zone goes here - "WGS84", - self.array_t['location/Y/value_d'], - self.array_t['location/X/value_d'], - self.array_t['location/Z/value_d']) + utmc = ph5utils.UTMConversions() + Y, X, Z = utmc.geod2utm(self.array_t['location/Y/value_d'], + self.array_t['location/X/value_d'], + self.array_t['location/Z/value_d']) s, vx, vy = pick_values_32(X, Y) tra['coordScale'] = s @@ -844,11 +843,10 @@ def set_trace_header(self): if self.utm: try: - Y, X, Z = geod2utm(None, # Zone goes here - "WGS84", - self.event_t['location/Y/value_d'], - self.event_t['location/X/value_d'], - self.event_t['location/Z/value_d']) + utmc = ph5utils.UTMConversions() + Y, X, Z = utmc.geod2utm(self.event_t['location/Y/value_d'], + self.event_t['location/X/value_d'], + self.event_t['location/Z/value_d']) s, vx, vy = pick_values_32(X, Y) tra['sourceLongOrX'] = vx diff --git a/ph5/core/sfactory.py b/ph5/core/sfactory.py deleted file mode 100755 index 199d4460..00000000 --- a/ph5/core/sfactory.py +++ /dev/null @@ -1,753 +0,0 @@ -#!/usr/bin/env pnpython3 - -# -# Build SEG-Y or PASSCAL SEGY file. -# -# This sits between the code that talks to the ph5 -# file, and the code that generates the SEG file. -# -# Steve Azevedo, August 2007 -# - -from ph5.core import segy_h, ebcdic -from cs2cs import geod2utm -import math -import numpy -import os -import time -import string -import sys -import logging - -PROG_VERSION = '2018.268' -LOGGER = logging.getLogger(__name__) - -os.environ['TZ'] = 'UTC' -time.tzset() - -MAX_16 = 32767. -MAX_32 = 2147483648. 
- - -def __version__(): - print PROG_VERSION - - -# Map channel number to Trace ID (29-30 in trace header) -CHAN2TID = {1: 1, 2: 16, 3: 17, 4: 15, 5: 16, 6: 17} -COUNTMULT = {'nV/count': 1000000000., 'uV/count': 1000000., - 'mV/count': 1000., 'volts/count': 1.} -EXT_HEADER_CHOICES = ['P', 'S', 'U', 'I'] - - -class Ssegy: - ''' - - ''' - - def __init__(self, - # start_point, # Starting point integer - # length_points, # Number of points - # das_t, # Das_t row - sort_t, # Sort_t row - # array_t, # Array_t row - # time_t, # Time_t row - event_t, # Event_t row - # response_t, # Response_t row - # receiver_t, # Receiver_t row (orientation) - # offset_t, # Offset_t - pas='U', # 'P' -> PASSCAL extended header - # 'S' -> SEG extended header - # 'U' -> Menlo USGS extended header - # 'I' -> SIOSEIS - # 'N' -> iNova firefly - length_points=0, - seq=1, # Line sequence number - utm=False): # Populate trace header with UTM coordinates - - self.start_point = 0 - self.length_points = length_points - if length_points == 0: - self.length_points_all = 0 - else: - self.length_points_all = length_points - - self.das_t = None - self.sample_rate = None - self.sort_t = sort_t - self.time_t = None - self.event_t = event_t - self.response_t = None - self.offset_t = None - self.pas = pas - self.utm = utm - self.seq = seq - self.text_header = segy_h.Text() - self.reel_header = segy_h.Reel() - # Allow non-standard SEG-Y - self.break_standard = False - self.trace_type = None # Current data trace type (int, float) - self.trace_byteorder = None # Current data trace byteorder - - def write_text_header(self, fd): - fd.write(self.text_header.get()[:3200]) - - def write_reel_header(self, fd): - # 400 bytes - fd.write(self.reel_header.get()[:400]) - - def write_trace_header(self, fd): - # 180 bytes - try: - fd.write(self.trace_header.get()[:180]) - except Exception as e: - LOGGER.error("{0:s}\n{1:s}" - .format(e, repr(self.trace_header.__dict__))) - # 60 bytes - try: - fd.write(self.extended_header.get()[:60]) - except Exception as e: - LOGGER.error("{0:s}\n{1:s}\n" - .format(e, repr(self.extended_header.__dict__))) - - def write_data_array(self, fd): - # Pad data to correct length with the median value - pad = numpy.array([]) - if len(self.data) < self.length_points_all: - if len(self.data) == 0: - m = 0 - else: - m = numpy.median(self.data) - - short = self.length_points_all - len(self.data) - pad = [m] * short - data = numpy.append(self.data, pad) - i = 0 - # Use PASSCAL extended header - if self.pas == 'P': - # PASSCAL SEGY should be the endianess of the machine - if self.trace_type == 'int': - x_d = numpy.array(data, numpy.int32) - # Float trace elements - elif self.trace_type == 'float': - x_d = numpy.array(data, numpy.float32) - else: - LOGGER.error( - "Trace type unknown: {0}\n".format(self.trace_type)) - - # Write the data on the end of the file - x_d.tofile(file=fd) - # Get the number of points we wrote - i += x_d.shape[0] - else: - # Need to look in self.response_t for - # bit_weight/value_d and scale trace values. - x_f = numpy.array(data, numpy.float32) - try: - bw = float(self.response_t['bit_weight/value_d']) - if bw == 0: - bw = 1. 
- x_f *= bw - except Exception as e: - LOGGER.warning( - "Problem applying trace bit weight.\n{0}" - .format(e)) - # Little endian machine - # We always want big endian in the SEG-Y file - if sys.byteorder == 'little': - # Little endian trace - if self.trace_byteorder == 'little': - # Int trace elements - if self.trace_type == 'int': - x_d = numpy.array(data, numpy.int32).byteswap() - # Float trace elements - elif self.trace_type == 'float': - x_d = numpy.array(data, numpy.float32).byteswap() - elif sys.byteorder == 'big': - # Big endian trace - if self.trace_byteorder == 'big': - if self.trace_type == 'int': - x_d = numpy.array(data, numpy.int32) - elif self.trace_type == 'float': - x_d = numpy.array(data, numpy.float32) - - x_f.tofile(file=fd) - i += x_f.shape[0] - return i - - def set_event_t(self, event_t): - self.event_t = event_t - - def set_array_t(self, array_t): - self.array_t = array_t - - # PASSCAL extended header - def set_pas(self): - self.pas = 'P' - # SEG extended header - - def set_seg(self): - self.pas = 'S' - # USGS Menlo extended header - - def set_usgs(self): - self.pas = 'U' - # Set extended header type - - def set_ext_header_type(self, ext_type): - if ext_type in EXT_HEADER_CHOICES: - self.pas = ext_type - - def set_data(self, data): - self.data = data - - def set_trace_type(self, t, o): - ''' Set trace type, and byteorder ''' - self.trace_type = t - self.trace_byteorder = o - - def set_das_t(self, das_t): - self.das_t = das_t - self.sample_rate = das_t['sample_rate_i'] - - def set_sample_rate(self, sample_rate): - self.sample_rate = sample_rate - - def set_time_t(self, time_t): - self.time_t = time_t - - def set_response_t(self, response_t): - self.response_t = response_t - - # Orientation info - def set_receiver_t(self, receiver_t): - self.receiver_t = receiver_t - - def set_offset_t(self, offset_t): - self.offset_t = offset_t - - def set_length_points(self, length_points): - self.length_points = length_points - if self.length_points_all == 0: - self.length_points_all = length_points - - def set_line_sequence(self, seq): - self.seq = seq - - def set_cut_start_epoch(self, start): - self.cut_start_epoch = start - - def set_text_header(self): - txt = {} - if self.pas == 'U': - style = 'MENLO' - elif self.pas == 'P': - style = 'PASSCAL' - elif self.pas == 'S': - style = 'SEG' - elif self.pas == 'I': - style = 'SIOSEIS' - elif self.pas == 'N': - style = 'INOVA' - - if self.break_standard is True: - txt['_06_'] = ebcdic.AsciiToEbcdic( - "C 6 SAMPLES/TRACE {0:6d} ".format(int(self.length_points))) - - txt['_38_'] = ebcdic.AsciiToEbcdic( - "C38 {0:<7} STYLE EXTENDED TRACE HEADER".format(style) + " " * 41) - txt['_39_'] = ebcdic.AsciiToEbcdic("C39 SEG Y REV1" + " " * 66) - txt['_40_'] = ebcdic.AsciiToEbcdic( - "C40 END TEXTURAL HEADER" + " " * 57) - - try: - self.text_header.set(txt) - except segy_h.HeaderError as e: - LOGGER.error(e) - - def set_reel_header(self, traces): - rel = {} - - try: - rel['lino'] = int(self.sort_t['array_name_s']) - except (ValueError, TypeError): - rel['lino'] = 1 - - rel['reno'] = 1 - rel['ntrpr'] = traces - rel['hdt'] = int((1.0 / float(self.sample_rate)) * 1000000.0) - if self.length_points <= MAX_16: - rel['hns'] = self.length_points - rel['nso'] = self.length_points - # Non-standard sample length - elif self.break_standard is True: - rel['hns'] = 0 - rel['nso'] = 0 - else: - rel['hns'] = int(MAX_16) - rel['nso'] = int(MAX_16) - - rel['format'] = 5 # IEEE floats - rel['mfeet'] = 1 # meters - rel['rev'] = 0x0100 # rev 1.0 - rel['trlen'] = 1 # 
all traces the same length - rel['extxt'] = 0 # no extra text headers - - try: - self.reel_header.set(rel) - except segy_h.HeaderError as e: - LOGGER.error(e) - - def set_break_standard(self, tof=False): - self.break_standard = tof - - def _cor(self, max_drift_rate=0.01): - ''' - Calculate start, end, drift and offset of clock - ''' - if self.sort_t: - sort_start_time = fepoch( - self.sort_t['start_time/epoch_l'], - self.sort_t['start_time/micro_seconds_i']) - else: - sort_start_time = self.cut_start_epoch - - if self.time_t is None: - return 0, 0, sort_start_time - - if self.sort_t: - sort_end_time = fepoch( - self.sort_t['end_time/epoch_l'], - self.sort_t['end_time/micro_seconds_i']) - else: - sort_end_time = sort_start_time + \ - (self.length_points / self.sample_rate) - - sort_mid_time = sort_start_time + \ - ((sort_end_time - sort_start_time) / 2.0) - data_start_time = fepoch( - self.time_t['start_time/epoch_l'], - self.time_t['start_time/micro_seconds_i']) - delta_time = sort_mid_time - data_start_time - - # 1% drift is excessive, don't time correct. - if abs(self.time_t['slope_d']) >= max_drift_rate: - time_correction_ms = 0 - else: - time_correction_ms = int( - self.time_t['slope_d'] * 1000.0 * delta_time) - - # Sample interval - si = 1.0 / float(int(self.sample_rate)) - # Check if we need to time correct? - if abs(self.time_t['offset_d']) < (si / 2.0): - time_correction_ms = 0 - # KLUDGE reverse sign here - if time_correction_ms < 0: - time_correction_ms *= -1 - sgn = 1 - else: - sgn = -1 - - new_start_time = (float(time_correction_ms * sgn) / - 1000.0) + sort_start_time - - return ( - 0xFFFF & time_correction_ms) * sgn, ( - 0xFFFF & (time_correction_ms << 16)) * sgn, new_start_time - - def set_ext_header_seg(self): - ''' SEG-Y rev 01 extended header ''' - ext = {} - self.extended_header = segy_h.Seg() - # Same as lino from reel header - try: - ext['Inn'] = int(self.sort_t['array_name_s']) - except (ValueError, TypeError): - ext['Inn'] = 1 - # Shot point number - try: - ext['Spn'] = int(self.event_t['id_s']) - except ValueError: - ext['Spn'] = 0 - # Spn scaler - ext['Scal'] = 1 - # Trace value measurement units - ext['Tvmu'] = 0 - # Size of shot - ext['Smsmant'] = int(self.event_t['size/value_d']) - ext['Smsexp'] = 1 - ext['Smu'] = 0 - # Number of samples - ext['num_samps'] = self.length_points - # Sample interval in microseconds - ext['samp_rate'] = int((1.0 / self.sample_rate) * 1000000.0) - - return ext - - def set_ext_header_pas(self): - ext = {} - self.extended_header = segy_h.Passcal() - - cor_low, cor_high, sort_start_time = self._cor() - if cor_high < -MAX_16 or cor_high > MAX_16: - cor_high = int(MAX_16) - - ext['totalStaticHi'] = cor_high - ext['num_samps'] = int(self.length_points) - ext['max'] = numpy.max(self.data) - ext['min'] = numpy.min(self.data) - ext['samp_rate'] = int((1.0 / self.sample_rate) * 1000000.0) - ext['data_form'] = 1 # 32 bit - ext['scale_fac'] = float(self.response_t['bit_weight/value_d']) - - corrected_start_time = self.cut_start_epoch + (cor_low / 1000.0) - m_secs = int(math.modf(corrected_start_time)[0] * 1000.0) - ext['m_secs'] = m_secs - - try: - ttuple = time.gmtime([self.event_t['time/epoch_l']]) - ext['trigyear'] = ttuple[0] - ext['trigday'] = ttuple[7] - ext['trighour'] = ttuple[3] - ext['trigminute'] = ttuple[4] - ext['trigsecond'] = ttuple[5] - ext['trigmills'] = int( - self.event_t['time/micro_seconds_i'] / 1000.0) - except BaseException: - pass - - try: - try: - ext['inst_no'] = int(self.array_t['das/serial_number_s']) - except 
ValueError: - ext['inst_no'] = int(self.array_t['das/serial_number_s'], 16) - except BaseException: - ext['inst_no'] = 0 - - try: - ext['station_name'] = string.ljust( - string.strip(self.array_t['id_s']), 6) - except BaseException: - ext['station_name'] = string.ljust( - string.strip(self.array_t['das/serial_number_s']), 6) - - return ext - - def set_ext_header_menlo(self): - ''' Use USGS Menlo's idea of extended trace header ''' - ext = {} - self.extended_header = segy_h.Menlo() - - # Start of trace - cor_low, cor_high, sort_start_time = self._cor() - corrected_start_time = self.cut_start_epoch + (cor_low / 1000.0) - u_secs = int(math.modf(corrected_start_time)[0] * 1000000.0) - ext['start_usec'] = u_secs - # Shot size in Kg - try: - if self.event_t['size/units_s'][0] == 'k' or\ - self.event_t['size/units_s'][0] == 'K': - ext['shot_size'] = self.event_t['size/value_d'] - except TypeError: - pass - - # Shot time - try: - ttuple = time.gmtime(float(self.event_t['time/epoch_l'])) - ext['shot_year'] = ttuple[0] - ext['shot_doy'] = ttuple[7] - ext['shot_hour'] = ttuple[3] - ext['shot_minute'] = ttuple[4] - ext['shot_second'] = ttuple[5] - ext['shot_us'] = self.event_t['time/micro_seconds_i'] - except BaseException: - pass - - # Always set to 0 - ext['si_override'] = 0 - # Azimuth and inclination, set to 0? - ext['sensor_azimuth'] = 0 - ext['sensor_inclination'] = 0 - # Linear moveout static x/v ms - ext['lmo_ms'] = 0 - # LMO flag, 1 -> n - ext['lmo_flag'] = 1 - # Inst type, 16 == texan - if self.array_t['das/model_s'].find('130') != -1: - ext['inst_type'] = 13 # RT-130 - else: - ext['inst_type'] = 16 # texan - - # Always set to 0 - ext['correction'] = 0 - # Uphole azimuth set to zero - ext['azimuth'] = 0 - # Sensor type - if self.array_t['sensor/model_s'].find('28') != -1: - ext['sensor_type'] = 1 # L28 - elif self.array_t['sensor/model_s'].find('22') != -1: - ext['sensor_type'] = 2 # L22 - elif self.array_t['sensor/model_s'].find('4') != -1: - ext['sensor_type'] = 4 # L4 - else: - ext['sensor_type'] = 99 # Don't know, don't care - - # Sensor sn - try: - ext['sensor_sn'] = int(self.array_t['sensor/serial_number_s']) - except BaseException: - pass - - # DAS sn - try: - ext['das_sn'] = int(self.array_t['das/serial_number_s']) - except ValueError: - ext['das_sn'] = 0xFFFF & int( - self.array_t['das/serial_number_s'], 16) - else: - pass - - # 16 free bits - try: - ext['empty1'] = self.array_t['channel_number_i'] - except BaseException: - pass - - # Number of samples - ext['samples'] = self.length_points - # 32 free bits - try: - ext['empty2'] = int(self.array_t['description_s']) - except BaseException: - pass - - # clock correction - try: - ext['clock_drift'] = self._cor()[0] - if ext['clock_drift'] > MAX_16 or ext['clock_drift'] < -MAX_16: - ext['clock_drift'] = int(MAX_16) - except BaseException: - pass - - # 16 free bits - try: - ext['empty3'] = int(self.event_t['description_s']) - except BaseException: - pass - - return ext - - def set_ext_header_sioseis(self): - ''' Use SIOSEIS extended header ''' - ext = {} - self.extended_header = segy_h.Sioseis() - ext['sampleInt'] = 1.0 / self.sample_rate - return ext - - def set_trace_header(self): - ''' - Set values in trace header. 
- ''' - tra = {} - self.trace_header = segy_h.Trace() - - # Get time correction - cor_low, cor_high, sort_start_time = self._cor() - if cor_low < -MAX_16 or cor_low > MAX_16: - cor_low = int(MAX_16) - - tra['totalStatic'] = cor_low - - tra['lineSeq'] = self.seq - # das_t['event_number'] is the FFID or recording window - tra['event_number'] = self.das_t['event_number_i'] - tra['channel_number'] = self.seq - # Set the traceID to the channel, 15 => Z, 16 => N, 17 => E - # Fallback is to set it to 1 => seismic data - try: - tra['traceID'] = CHAN2TID[self.array_t['channel_number_i']] - except BaseException: - # Changed for Mark Goldman, Aug 2011 - tra['traceID'] = 1 - - length_points = int(self.length_points) - if length_points < MAX_16: - tra['sampleLength'] = length_points - # Non-standard sample length - elif self.break_standard is True: - tra['sampleLength'] = 0 - else: - tra['sampleLength'] = int(MAX_16) - - sample_rate = float(int(self.sample_rate)) - if sample_rate > 30.0: - tra['deltaSample'] = int((1.0 / sample_rate) * 1000000.0) - else: - tra['deltaSample'] = 1 - - tra['gainType'] = 1 - tra['gainConst'] = int(self.response_t['gain/value_i']) - - corrected_start_time = self.cut_start_epoch + (cor_low / 1000.0) - ttuple = time.gmtime(corrected_start_time) - tra['year'] = ttuple[0] - tra['day'] = ttuple[7] - tra['hour'] = ttuple[3] - tra['minute'] = ttuple[4] - tra['second'] = ttuple[5] - tra['timeBasisCode'] = 4 # UTC - - twfUnits = self.response_t['bit_weight/units_s'].strip() - try: - mult = COUNTMULT[twfUnits] - except BaseException: - mult = 1. - - try: - tra['traceWeightingFactor'] = int( - math.log(self. - response_t['bit_weight/value_d'] / mult, 2) + 0.5) - except ValueError: - tra['traceWeightingFactor'] = 1. - # Limit size of phoneFirstTrace to 65535 maximum - tra['phoneFirstTrace'] = 0xFFFF & int(self.array_t['id_s']) - - # Set receiver location here - try: - multiplier = units_stub(string.strip( - self.array_t['location/Z/units_s']), 'decimeters') - tra['recElevation'] = int( - float(self.array_t['location/Z/value_d']) * multiplier) - tra['elevationScale'] = -10 - except BaseException: - tra['recElevation'] = 0 - tra['elevationScale'] = 0 - - if self.utm is True: - try: - Y, X, Z = geod2utm(None, # Zone goes here - "WGS84", - self.array_t['location/Y/value_d'], - self.array_t['location/X/value_d'], - self.array_t['location/Z/value_d']) - tra['coordScale'] = -10 - tra['recLongOrX'] = int((X * 10.0) + 0.5) - tra['recLatOrY'] = int((Y * 10.0) + 0.5) - tra['coordUnits'] = 1 # meters - except BaseException: - tra['coordScale'] = 0 - tra['recLongOrX'] = 0 - tra['recLatOrY'] = 0 - tra['coordUnits'] = 0 - else: - try: - tra['coordScale'] = -10000 - tra['recLongOrX'] = int( - (self.array_t['location/X/value_d'] * 10000.0) + 0.5) - tra['recLatOrY'] = int( - (self.array_t['location/Y/value_d'] * 10000.0) + 0.5) - tra['coordUnits'] = 3 - except BaseException: - tra['coordScale'] = 0 - tra['recLongOrX'] = 0 - tra['recLatOrY'] = 0 - tra['coordUnits'] = 0 - - if self.event_t: - try: - tra['energySourcePt'] = int(self.event_t['id_s']) - except Exception: - tra['energySourcePt'] = 0 - - # Set source location here - try: - multiplier = units_stub(string.strip( - self.event_t['location/Z/units_s']), 'decimeters') - tra['sourceSurfaceElevation'] = int( - float(self.event_t['location/Z/value_d']) * multiplier) - tra['sourceDepth'] = int( - float(self.event_t['depth/value_d']) * multiplier) - except BaseException: - tra['sourceSurfaceElevation'] = 0 - tra['sourceDepth'] = 0 - - if self.utm: - try: 
- Y, X, Z = geod2utm(None, # Zone goes here - "WGS84", - self.event_t['location/Y/value_d'], - self.event_t['location/X/value_d'], - self.event_t['location/Z/value_d']) - - tra['sourceLongOrX'] = int((X * 10.0) + 0.5) - tra['sourceLatOrY'] = int((Y * 10.0) + 0.5) - except BaseException: - tra['sourceLongOrX'] = 0 - tra['sourceLatOrY'] = 0 - - else: - try: - tra['sourceLongOrX'] = int( - (self.event_t['location/X/value_d'] * 10000.0) + 0.5) - tra['sourceLatOrY'] = int( - (self.event_t['location/Y/value_d'] * 10000.0) + 0.5) - except BaseException: - tra['sourceLongOrX'] = 0 - tra['sourceLatOrY'] = 0 - - if self.offset_t: - tra['sourceToRecDist'] = self.offset_t['offset/value_d'] - - self.trace_header.set(tra) - - if self.pas == 'P': - # print "Set PASSCAL header" - ext = self.set_ext_header_pas() - elif self.pas == 'U': - # print "Set Menlo header" - ext = self.set_ext_header_menlo() - elif self.pas == 'S': - # print "Set SEG header" - ext = self.set_ext_header_seg() - elif self.pas == 'I': - ext = self.set_ext_header_sioseis() - - self.extended_header.set(ext) - -# MixIns - - -def units_stub(have, want): - """ - Finds the conversion multiplier needed. - """ - # Add more prefixes? - pref = {'yocto': 1e-24, 'micro': 1e-6, 'milli': 1e-3, 'centi': 1e-2, - 'deci': 1e-1, 'deka': 1e1, 'hecto': 1e2, 'kilo': 1e3, 'mega': 1e6} - h = None - w = None - for p in pref.keys(): - if have[:len(p)] == p: - h = pref[p] - if want[:len(p)] == p: - w = pref[p] - - if h is None: - h = 1.0 - if w is None: - w = 1.0 - - ret = h / w - - return ret - - -def fepoch(epoch, ms): - ''' - Given ascii epoch and miliseconds return epoch as a float. - ''' - epoch = float(int(epoch)) - secs = float(int(ms)) / 1000000.0 - - return epoch + secs diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 6758ccf3..4dbeae71 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -960,44 +960,6 @@ def writeINDEX(): MAP_INFO = {} -def txncsptolatlon(northing, easting): - ''' - Sweetwater - Convert texas state plane coordinates in feet to - geographic coordinates, WGS84. - ''' - from pyproj import Proj, transform - # Texas NC state plane feet Zone 4202 - sp = Proj(init='epsg:32038') - # WGS84, geographic - wgs = Proj(init='epsg:4326', proj='latlong') - # Texas SP coordinates: survey foot is 1200/3937 meters - lon, lat = transform(sp, wgs, easting * 0.30480060960121924, - northing * 0.30480060960121924) - - return lat, lon - - -def utmcsptolatlon(northing, easting): - ''' - Mount Saint Helens - Convert UTM to - geographic coordinates, WGS84. - ''' - # UTM - new_UTM = re.split(r'(\d+)', UTM) - utmzone = str(new_UTM[1]) - - NS = str(new_UTM[2]).upper() - if NS != 'N' and NS != 'S': - NS = 'N' - -# OLD WAY - deprecated, called pyproj. New way uses ph5utils. - utm = ph5utils.UTMConversions() - lat, lon = utm.lat_long(easting, northing, utmzone, NS) - return lat, lon - - def correct_append_number(): # from math import modf traces = SD.reel_headers.extended_header_2['number_records'] @@ -1187,18 +1149,28 @@ def print_container(container): try: if UTM: # UTM - LAT, LON = utmcsptolatlon( + utmc = ph5utils.UTMConversions() + new_UTM = re.split(r'(\d+)', UTM) + utmzone = str(new_UTM[1]) + + NS = str(new_UTM[2]).upper() + if NS != 'N' and NS != 'S': + NS = 'N' + + LAT, LON = utmc.lat_long( SD.trace_headers.trace_header_N[ - 4].receiver_point_Y_final / 10., + 4].receiver_point_X_final / 10., SD.trace_headers.trace_header_N[ - 4].receiver_point_X_final / 10.) 
+ 4].receiver_point_Y_final / 10., + utmzone, NS) elif TSPF: # Texas State Plane coordinates - LAT, LON = txncsptolatlon( + tspc = ph5utils.TSPConversions() + LAT, LON = tspc.lat_long( SD.trace_headers.trace_header_N[ - 4].receiver_point_Y_final / 10., + 4].receiver_point_X_final / 10., SD.trace_headers.trace_header_N[ - 4].receiver_point_X_final / 10.) + 4].receiver_point_Y_final / 10.) else: LAT = SD.trace_headers.trace_header_N[ 4].receiver_point_Y_final / 10. diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index 6e369cff..b69149bd 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -1,6 +1,7 @@ import unittest import pyproj from ph5.core.ph5utils import UTMConversions +from ph5.core.ph5utils import TSPConversions class TestUTMconversion(unittest.TestCase): @@ -15,6 +16,16 @@ def test_is_valid_utm_conversion_south(self): self.assertAlmostEqual(u.lat_long(540947, 1361594, 58, 'S'), (-77.81567398301094, 166.73816638798527)) + def test_is_valid_utm_conversion_inverse(self): + u = UTMConversions() # PIC Socorro + self.assertAlmostEqual(u.geod2utm(34.0734958, -106.9190959, 1456.0), + (3771967.003118457, 322916.0048106084, 1456.0)) + + def test_is_valid_tsp_conversion(self): + t = TSPConversions() # Sweetwater, Texas, units US FEET + self.assertAlmostEqual(t.lat_long(1380811, 6858888), + (-100.40568335900281, 32.468972700219375)) + def test_for_correct_type(self): with self.assertRaises(ValueError): u = UTMConversions() # 'heck' is not a float From fc789d5cb3455eef1e627f78149ceb5e2d1a52e7 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 2 Oct 2019 16:51:12 -0600 Subject: [PATCH 05/25] re-integrated with segd2ph5 --- ph5/core/ph5utils.py | 2 +- ph5/utilities/segd2ph5.py | 148 ++++++++++++++++++-------------------- 2 files changed, 70 insertions(+), 80 deletions(-) diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 770c4d63..acfdffca 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -18,7 +18,7 @@ from pyproj import Transformer import math -PROG_VERSION = "2019.239" +PROG_VERSION = "2019.275" class PH5Response(object): diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 4dbeae71..8664c88e 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -16,8 +16,10 @@ import re from math import modf from ph5.core import experiment, columns, segdreader, ph5utils +from ph5 import LOGGING_FORMAT -PROG_VERSION = "2019.239" +PROG_VERSION = "2019.275" +logger = logging.getLogger(__name__) MAX_PH5_BYTES = 1073741824 * 100. # 100 GB (1024 X 1024 X 1024 X 2) @@ -33,14 +35,20 @@ # RE for mini files miniPH5RE = re.compile(r".*miniPH5_(\d\d\d\d\d)\.ph5") -# LSB = 6.402437066e-6 # From Malcolm UCSD -LSB00 = 2500. / (2 ** 23) # 0dB -LSB12 = 625. / (2 ** 23) # 12dB -LSB24 = 156. / (2 ** 23) # 24dB -LSB36 = 39. 
/ (2 ** 23) # 36dB = 39mV full scale -LSB = LSB36 +# -2.5V to 2.5V +mV_full_scale = 5000 +# 24-bit +counts_full_scale = 2**24 -LSB_MAP = {36: LSB36, 24: LSB24, 12: LSB12, 0: LSB00} + +def bitweight(db): + # where db = 20log(V1,V2) + return (mV_full_scale / (10.**(db/20.))) / counts_full_scale + + +dbs = (0, 6, 12, 18, 24, 30, 36) +LSB_MAP = {db: bitweight(db) for db in dbs} +LSB = LSB_MAP[36] # Manufacturers codes FAIRFIELD = 20 @@ -117,8 +125,8 @@ def fn_sort(a, b): try: fh = file(infile) - except BaseException: - sys.stderr.write("Warning: Failed to open %s\n" % infile) + except Exception: + logger.warning("Failed to open %s\n" % infile) return while True: @@ -212,21 +220,21 @@ def get_args(): FILES.append(options.rawfile) if len(FILES) == 0: - sys.stderr.write("Error: No input file given.\n") - sys.exit() + raise Exception("No input file given.\n") # Set output file if options.outfile is not None: PH5 = options.outfile else: - sys.stderr.write("Error: No outfile (PH5) given.\n") - sys.exit() - - logging.basicConfig( - filename=os.path.join('.', "segd2ph5.log"), - format="%(asctime)s %(message)s", - level=logging.INFO - ) + raise Exception("No outfile (PH5) given.\n") + + # Write log to file + ch = logging.FileHandler("segd2ph5.log") + ch.setLevel(logging.INFO) + # Add formatter + formatter = logging.Formatter(LOGGING_FORMAT) + ch.setFormatter(formatter) + logger.addHandler(ch) # Need to process in order: R309_674.1.0.rg16, 309 == line, # 674 = receiver point, 1 = first file # Sorted where the file list is read... @@ -252,7 +260,6 @@ def openPH5(filename): return EXREC except BaseException: pass - # sys.stderr.write ("*** Opening: {0} ".format (filename)) exrec = experiment.ExperimentGroup(nickname=filename) exrec.ph5open(True) exrec.initgroup() @@ -277,7 +284,7 @@ def update_index_t_info(starttime, samples, sps): DAS_INFO[das].append(di) MAP_INFO[das].append(dm) - logging.info( + logger.info( "DAS: {0} File: {1} First Sample: {2} Last Sample: {3}".format( das, ph5file, time.ctime(starttime), time.ctime(stoptime))) @@ -286,9 +293,7 @@ def update_external_references(): ''' Update external references in master.ph5 to miniPH5 files in Receivers_t ''' global F - # sys.stderr.write ("Updating external references...\n"); - # sys.stderr.flush () - logging.info("Updating external references...") + logger.info("Updating external references...") n = 0 for i in INDEX_T_DAS.rows: external_file = i['external_file_name_s'][2:] @@ -313,12 +318,9 @@ def update_external_references(): n += 1 except Exception as e: # pass - sys.stderr.write("{0}\n".format(e.message)) + logger.error("{0}\n".format(e.message)) - # sys.exit () - # sys.stderr.write ("done, {0} das nodes recreated.\n".format (n)) - - logging.info("done, {0} das nodes recreated.\n".format(n)) + logger.info("done, {0} das nodes recreated.\n".format(n)) n = 0 for i in INDEX_T_MAP.rows: @@ -350,11 +352,9 @@ def update_external_references(): n += 1 except Exception as e: # pass - sys.stderr.write("{0}\n".format(e.message)) + logger.error("{0}\n".format(e.message)) - # sys.exit () - # sys.stderr.write ("done, {0} map nodes recreated.\n".format (n)) - logging.info("done, {0} map nodes recreated.\n".format(n)) + logger.info("done, {0} map nodes recreated.\n".format(n)) def get_current_data_only(size_of_data, das=None): @@ -497,7 +497,7 @@ def process_das(): p_das_t['receiver_table_n_i'] = M[get_true_channel()] else: p_das_t['receiver_table_n_i'] = 0 # 0 -> Z - logging.warn( + logger.warning( "Header channel set: {0}. 
Check Receiver_t entries!".format( th.trace_header.channel_set)) @@ -509,7 +509,7 @@ def process_das(): try: trace_epoch = th.trace_header_N[2].shot_epoch except Exception as e: - logging.warn("Failed to read shot epoch: {0}.".format(e.message)) + logger.warning("Failed to read shot epoch: {0}.".format(e.message)) trace_epoch = 0. f, i = modf(trace_epoch / 1000000.) @@ -548,7 +548,7 @@ def process_das(): try: p_response_t['gain/value_i'] = th.trace_header_N[3].preamp_gain_db except Exception as e: - logging.warn( + logger.warning( "Failed to read trace pre amp gain: {0}.".format(e.message)) p_response_t['gain/value_i'] = 0. p_response_t['gain/units_s'] = 'Unknown' @@ -576,8 +576,8 @@ def process_das(): # Failed, leave as float # for x in tr : print x/LSB # print e.message - sys.stderr.write( - "Warning: Could not convert trace to counts. max: {1},\ + logger.warning( + "Could not convert trace to counts. max: {1},\ min {2}\n{0}".format( e.message, tr.max(), tr.min())) p_response_t['bit_weight/value_d'] = 1. @@ -700,7 +700,7 @@ def seen_sta(): try: f, i = modf(rh.extended_header_1.epoch_deploy / 1000000.) except Exception as e: - logging.warn( + logger.warning( "Failed to read extended header 1 deploy epoch: {0}.".format( e.message)) f = i = 0. @@ -711,7 +711,7 @@ def seen_sta(): try: f, i = modf(rh.extended_header_1.epoch_pickup / 1000000.) except Exception as e: - logging.warn( + logger.warning( "Failed to read extended header 1 pickup epoch: {0}.".format( e.message)) f = i = 0. @@ -728,7 +728,7 @@ def seen_sta(): else: p_array_t['das/model_s'] = DM[1] except Exception as e: - logging.warn( + logger.warning( "Failed to read channel sets per scan: {0}.".format(e.message)) p_array_t['das/model_s'] = 'zland-[13]C' p_array_t['das/serial_number_s'] = Das @@ -761,7 +761,7 @@ def seen_sta(): p_array_t['location/Z/value_d'] =\ th.trace_header_N[4].receiver_point_depth_final / 10. except Exception as e: - logging.warn( + logger.warning( "Failed to read receiver point depth: {0}.".format(e.message)) p_array_t['location/Z/value_d'] = 0. 
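A quick standalone check of the bitweight() helper introduced at the top of this file, using only the constants visible in this diff. It is illustrative only, not part of the patch, and simply compares the new formula against the hard-coded LSB constants the patch removes.

# Illustrative check of bitweight(); not part of the patch.
mV_full_scale = 5000          # -2.5 V to 2.5 V, expressed in mV
counts_full_scale = 2 ** 24   # 24-bit converter

def bitweight(db):
    # db = 20 * log10(V1/V2); 10**(db/20.) turns the gain in dB back into a voltage ratio
    return (mV_full_scale / (10. ** (db / 20.))) / counts_full_scale

print(bitweight(0))    # 2.98023e-04, identical to the old LSB00 = 2500. / (2 ** 23)
print(bitweight(24))   # 1.88040e-05, vs. the old LSB24 = 156. / (2 ** 23) = 1.85966e-05
print(bitweight(36))   # 4.72332e-06, vs. the old LSB36 = 39. / (2 ** 23) = 4.64916e-06

The 24 dB case is also why the Response_t bit_weight expectation in test_ph5api.py moves from 1.85966491699e-05 to 1.88039941931e-05 later in this series.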
@@ -771,14 +771,15 @@ def seen_sta(): p_array_t['description_s'] = "DAS: {0}, Node ID: {1}".format( Das, rh.extended_header_1.id_number) except Exception as e: - logging.warn( + logger.warning( "Failed to read extended header 1 ID number: {0}.".format( e.message)) try: line = th.trace_header_N[4].line_number except Exception as e: - logging.warn("Failed to read line number: {0}.".format(e.message)) + logger.warning("Failed to read line number: {0}.".format( + e.message)) line = 0 chan_set = get_true_channel() @@ -1023,11 +1024,15 @@ def print_container(container): print '-' * 80 - get_args() + try: + get_args() + except Exception, err_msg: + logger.error(err_msg) + return 1 initializeExperiment() - logging.info("segd2ph5 {0}".format(PROG_VERSION)) - logging.info("{0}".format(sys.argv)) + logger.info("segd2ph5 {0}".format(PROG_VERSION)) + logger.info("{0}".format(sys.argv)) if len(FILES) > 0: RESP = Resp(EX.ph5_g_responses) rows, keys = EX.ph5_g_receivers.read_index() @@ -1042,9 +1047,7 @@ def print_container(container): try: SIZE = os.path.getsize(f) except Exception as e: - sys.stderr.write("Error: failed to read {0}, {1}.\ - Skipping...\n".format(f, str(e.message))) - logging.error("Error: failed to read {0}, {1}.\ + logger.error("Failed to read {0}, {1}.\ Skipping...\n".format(f, str(e.message))) continue @@ -1055,9 +1058,7 @@ def print_container(container): RH = False # print "isSEGD" if not SD.isSEGD(expected_manufactures_code=MANUFACTURERS_CODE): - sys.stdout.write(":: {0}\n".format(SD.name())) - sys.stdout.flush() - logging.info( + logger.error( "{0} is not a Fairfield SEG-D file. Skipping.".format( SD.name())) continue @@ -1072,10 +1073,8 @@ def print_container(container): # print "external headers" SD.process_external_headers() except segdreader.InputsError as e: - sys.stdout.write(":: {0}\n".format("".join(e.message))) - sys.stdout.flush() - logging.info( - "Error: Possible bad SEG-D file -- {0}".format( + logger.error( + "Possible bad SEG-D file -- {0}".format( "".join(e.message))) continue @@ -1087,16 +1086,12 @@ def print_container(container): part_number, node_id, number_of_channels = get_node(SD) # EXREC = get_current_data_only(SIZE, Das) - # sys.stderr.write ("Processing: {0}... Size: {1}\n".format\ - # (SD.name (), SIZE)) - sys.stdout.write(":: {0}\n".format(SD.name())) - sys.stdout.flush() - logging.info( + logger.info(":: {0}\n".format(SD.name())) + logger.info( "Processing: {0}... Size: {1}\n".format(SD.name(), SIZE)) if EXREC.filename != MINIPH5: - # sys.stderr.write ("Opened: {0}...\n".format (EXREC.filename)) - logging.info("Opened: {0}...\n".format(EXREC.filename)) - logging.info( + logger.info("Opened: {0}...\n".format(EXREC.filename)) + logger.info( "DAS: {0}, Node ID: {1}, PN: {2}, Channels: {3}".format( Das, node_id, part_number, number_of_channels)) MINIPH5 = EXREC.filename @@ -1136,12 +1131,9 @@ def print_container(container): try: trace, cs = SD.process_trace() except segdreader.InputsError as e: - # sys.stderr.write ("Error 2: Possible bad SEG-D file \ - # -- {0}".format ("".join (e))) - sys.stdout.write(": {0}\n".format(F)) - sys.stdout.flush() - logging.info( - "Error: Possible bad SEG-D file -- {0}".format( + logger.error("{0}\n".format(F)) + logger.error( + "Possible bad SEG-D file -- {0}".format( "".join(e.message))) break @@ -1177,7 +1169,7 @@ def print_container(container): LON = SD.trace_headers.trace_header_N[ 4].receiver_point_X_final / 10. 
except Exception as e: - logging.warn( + logger.warning( "Failed to convert location: {0}.\n".format( e.message)) @@ -1241,8 +1233,7 @@ def print_container(container): for line in TRACE_JSON: log_array.append(line) - sys.stdout.write(":: {0}\n".format(F)) - sys.stdout.flush() + logger.info(":: {0}\n".format(F)) write_arrays(ARRAY_T) seconds = time.time() - then @@ -1251,11 +1242,10 @@ def print_container(container): EX.ph5close() EXREC.ph5close() except Exception as e: - sys.stderr.write("Warning: {0}\n".format("".join(e.message))) + logger.warning("{0}\n".format("".join(e.message))) - print "Done...{0:b}".format(int(seconds / 6.)) # Minutes X 10 - logging.info("Done...{0:b}".format(int(seconds / 6.))) - logging.shutdown + logger.info("Done...{0:b}".format(int(seconds / 6.))) + logging.shutdown() prof() From 5a194eeb80589a5e6f1d5695f2ed0dfb8d4d12fc Mon Sep 17 00:00:00 2001 From: David Thomas Date: Mon, 21 Oct 2019 17:51:00 -0600 Subject: [PATCH 06/25] reconciling pyproj utilities --- ph5/core/ph5api.py | 24 ++++-------------------- ph5/core/ph5utils.py | 19 ++++++++++++++++++- ph5/core/segyfactory.py | 26 +++++--------------------- ph5/core/tests/test_ph5api.py | 6 +++--- ph5/utilities/novenqc.py | 11 +++++------ 5 files changed, 35 insertions(+), 51 deletions(-) diff --git a/ph5/core/ph5api.py b/ph5/core/ph5api.py index b71a310a..1748e1e6 100755 --- a/ph5/core/ph5api.py +++ b/ph5/core/ph5api.py @@ -11,8 +11,7 @@ import re import numpy as np import math -from pyproj import Geod -from ph5.core import columns, experiment, timedoy +from ph5.core import columns, experiment, timedoy, ph5utils from tables.exceptions import NoSuchNodeError PROG_VERSION = '2019.93' @@ -329,6 +328,7 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): LOGGER.warning("Couldn't get offset.") return {} try: + geod = ph5utils.Geodesics() if sta_line in self.Array_t and evt_line in self.Event_t: array_t = self.Array_t[sta_line]['byid'][sta_id][c] event_t = self.Event_t[evt_line]['byid'][evt_id] @@ -336,7 +336,8 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): lat0 = array_t[0]['location/Y/value_d'] lon1 = event_t['location/X/value_d'] lat1 = event_t['location/Y/value_d'] - az, baz, dist = run_geod(lat0, lon0, lat1, lon1) + az, baz, dist = geod.run_geod(lat0, lon0, lat1, lon1, + FACTS_M[UNITS]) except Exception as e: LOGGER.warning("Couldn't get offset. 
{0}".format(repr(e))) return {} @@ -1664,23 +1665,6 @@ def by_id(rows, key='id_s', secondary_key=None, unique_key=True): return byid, order -def run_geod(lat0, lon0, lat1, lon1): - UNITS = 'm' - ELLIPSOID = 'WGS84' - - config = "+ellps={0}".format(ELLIPSOID) - - g = Geod(config) - - az, baz, dist = g.inv(lon0, lat0, lon1, lat1) - - if dist: - dist /= FACTS_M[UNITS] - - # Return list containing azimuth, back azimuth, distance - return az, baz, dist - - def rect(r, w, deg=0): # Convert from polar to rectangular coordinates # radian if deg=0; degree if deg=1 diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index acfdffca..c14c98d3 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -15,7 +15,7 @@ from ph5.core.timedoy import epoch2passcal, passcal2epoch, TimeDOY, TimeError import time import re -from pyproj import Transformer +from pyproj import Transformer, Geod import math PROG_VERSION = "2019.275" @@ -120,6 +120,23 @@ def lat_long(self, easting, northing): lon, lat = transformer.transform(easting, northing) return (lon, lat) +class Geodesics: # added 2019-10-21 dthomas, consolidating from other locs + + def run_geod(lat0, lon0, lat1, lon1, scalar =1.0): + ELLIPSOID = 'WGS84' + + config = "+ellps={0}".format(ELLIPSOID) + + g = Geod(config) + + az, baz, dist = g.inv(lon0, lat0, lon1, lat1) + + if dist: + dist /= scalar + + # Return list containing azimuth, back azimuth, distance + return az, baz, dist + """ =============== diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index bdb32396..026150be 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -16,8 +16,7 @@ import string import sys import logging -from pyproj import Geod -from ph5.core import ph5utils # for geod2utm +from ph5.core import ph5utils # for geod2utm, run_geod from ph5.core import segy_h, ebcdic PROG_VERSION = '2018.268' @@ -871,10 +870,12 @@ def set_trace_header(self): tra['sourceToRecDist'] = self.offset_t['offset/value_d'] else: try: - az_baz_dist = run_geod(self.event_t['location/Y/value_d'], + geod = ph5utils.Geodesics() + az_baz_dist = geod.run_geod(self.event_t['location/Y/value_d'], self.event_t['location/X/value_d'], self.array_t['location/Y/value_d'], - self.array_t['location/X/value_d']) + self.array_t['location/X/value_d'], + FACTS[UNITS]) tra['sourceToRecDist'] = az_baz_dist[2] except Exception as e: # sys.stderr.write (e.message) @@ -1056,23 +1057,6 @@ def pick_values_32(X, Y): return sx, vx, vy -def run_geod(lat0, lon0, lat1, lon1): - ELLIPSOID = 'WGS84' - UNITS = 'm' - - config = "+ellps={0}".format(ELLIPSOID) - - g = Geod(config) - - az, baz, dist = g.inv(lon0, lat0, lon1, lat1) - - if dist: - dist /= FACTS[UNITS] - - # Return list containing azimuth, back azimuth, distance - return az, baz, dist - - # # Write standard SEG-Y reel header # diff --git a/ph5/core/tests/test_ph5api.py b/ph5/core/tests/test_ph5api.py index 466e1915..4c16020c 100644 --- a/ph5/core/tests/test_ph5api.py +++ b/ph5/core/tests/test_ph5api.py @@ -448,7 +448,7 @@ def test_response_t(self): self.assertEqual(keys, self.ph5API_object.Response_t['keys']) # check an entry to make sure it is what we expect - self.assertEqual(1.85966491699e-05, + self.assertEqual(1.88039941931e-05, self.ph5API_object.Response_t['rows'][0] ['bit_weight/value_d']) self.assertEqual('/Experiment_g/Responses_g/ZLAND3C_500_1_24', @@ -925,10 +925,10 @@ def test_cut(self): # should start half way thorugh data array # check a few samples # should match sample 2500 in array 005 - self.assertEqual(1331852800, + 
self.assertEqual(1317166976, traces[0].data[0]) # should match sample 4999 in array 005 - self.assertEqual(-123947064, + self.assertEqual(-122580344, traces[0].data[-1]) def test_get_extent(self): diff --git a/ph5/utilities/novenqc.py b/ph5/utilities/novenqc.py index 64e6bcd4..80e918c4 100644 --- a/ph5/utilities/novenqc.py +++ b/ph5/utilities/novenqc.py @@ -12,7 +12,7 @@ from random import randint from inspect import stack from ast import literal_eval -from pyproj import Geod +from ph5.core import ph5utils # for run_geod import simplekml as kml from ph5.core import timedoy @@ -215,16 +215,15 @@ def qc_dist(ys, xs, zs): # # Need to check for UTM and convert to lat/lon # + geod = ph5utils.Geodesics() units = 'm' - ellipsoid = 'WGS84' - config = "+ellps={0}".format(ellipsoid) - g = Geod(config) - az, baz, dist = g.inv(xs[0], ys[0], xs[1], ys[1]) + az, baz, dist = geod.run_geod(ys[0], xs[0], ys[1], xs[1], + FACTS[units]) if len(zs) > 1: zdelta = float(zs[1]) - float(zs[0]) else: zdelta = 0.0 - return az, baz, dist / FACTS[units], zdelta + return az, baz, dist, zdelta def qc_time(t1, t2): ''' From 26a933f817e8f5fbcd5ba5704e88accbbc4062e4 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Tue, 22 Oct 2019 10:26:48 -0600 Subject: [PATCH 07/25] flake8 updates --- ph5/core/ph5api.py | 1 + ph5/core/ph5utils.py | 3 ++- ph5/core/segyfactory.py | 9 +++++---- ph5/utilities/novenqc.py | 2 +- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/ph5/core/ph5api.py b/ph5/core/ph5api.py index 1748e1e6..73a6579a 100755 --- a/ph5/core/ph5api.py +++ b/ph5/core/ph5api.py @@ -329,6 +329,7 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): return {} try: geod = ph5utils.Geodesics() + UNITS = 'm' if sta_line in self.Array_t and evt_line in self.Event_t: array_t = self.Array_t[sta_line]['byid'][sta_id][c] event_t = self.Event_t[evt_line]['byid'][evt_id] diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index c14c98d3..59407c73 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -120,9 +120,10 @@ def lat_long(self, easting, northing): lon, lat = transformer.transform(easting, northing) return (lon, lat) + class Geodesics: # added 2019-10-21 dthomas, consolidating from other locs - def run_geod(lat0, lon0, lat1, lon1, scalar =1.0): + def run_geod(lat0, lon0, lat1, lon1, scalar=1.0): ELLIPSOID = 'WGS84' config = "+ellps={0}".format(ELLIPSOID) diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index 026150be..964d6309 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -871,11 +871,12 @@ def set_trace_header(self): else: try: geod = ph5utils.Geodesics() + UNITS = 'm' az_baz_dist = geod.run_geod(self.event_t['location/Y/value_d'], - self.event_t['location/X/value_d'], - self.array_t['location/Y/value_d'], - self.array_t['location/X/value_d'], - FACTS[UNITS]) + self.event_t['location/X/value_d'], + self.array_t['location/Y/value_d'], + self.array_t['location/X/value_d'], + FACTS[UNITS]) tra['sourceToRecDist'] = az_baz_dist[2] except Exception as e: # sys.stderr.write (e.message) diff --git a/ph5/utilities/novenqc.py b/ph5/utilities/novenqc.py index 80e918c4..2657f511 100644 --- a/ph5/utilities/novenqc.py +++ b/ph5/utilities/novenqc.py @@ -218,7 +218,7 @@ def qc_dist(ys, xs, zs): geod = ph5utils.Geodesics() units = 'm' az, baz, dist = geod.run_geod(ys[0], xs[0], ys[1], xs[1], - FACTS[units]) + FACTS[units]) if len(zs) > 1: zdelta = float(zs[1]) - float(zs[0]) else: From 5c066477ace52bcd17a9c1a5bf68be90b541a2da Mon Sep 17 00:00:00 2001 From: David Thomas 
Date: Tue, 22 Oct 2019 12:08:47 -0600 Subject: [PATCH 08/25] segd2ph5 version number set to 2019.252. --- ph5/utilities/segd2ph5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 8664c88e..b9e9f082 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -18,7 +18,7 @@ from ph5.core import experiment, columns, segdreader, ph5utils from ph5 import LOGGING_FORMAT -PROG_VERSION = "2019.275" +PROG_VERSION = "2019.252" logger = logging.getLogger(__name__) MAX_PH5_BYTES = 1073741824 * 100. # 100 GB (1024 X 1024 X 1024 X 2) From d3e5cd2b22ecf803dabba10c5baaf18799c45b4e Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 23 Oct 2019 14:20:41 -0600 Subject: [PATCH 09/25] Updating pyproj version to 2.2 --- environment.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/environment.yml b/environment.yml index bad7fc31..da01027c 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ dependencies: - nose - numpy - numexpr - - pyproj + - pyproj=2.2 - psutil - obspy - vispy diff --git a/setup.py b/setup.py index 393efe2e..19404a75 100644 --- a/setup.py +++ b/setup.py @@ -54,7 +54,7 @@ 'nose', 'numpy', 'numexpr', - 'pyproj', + 'pyproj>=2', 'psutil', 'obspy', 'vispy', From 4942cff6bd657e2679b4dd0a192e463b1b8baf1b Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 23 Oct 2019 14:44:14 -0600 Subject: [PATCH 10/25] pyproj update, flake8 2 --- ph5/entry_points.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ph5/entry_points.py b/ph5/entry_points.py index b47a2b43..72a28b3e 100644 --- a/ph5/entry_points.py +++ b/ph5/entry_points.py @@ -272,6 +272,11 @@ def __init__(self): 'ph5.clients.ph5tostationxml:main', 'Takes PH5 files and returns StationXML.', type=EntryPointTypes.CLIENT), + EntryPoint('ph5availability', + 'ph5.clients.ph5availability:main', + 'Takes PH5 files and returns time series ' + 'availability info.', + type=EntryPointTypes.CLIENT), EntryPoint('utmtolatlong', 'ph5.utilities.utmtolatlong:main', 'Converts UTM coordinates to Latitudes' From 18fba3eeaa634fc31b78566a324c618a3babfc1b Mon Sep 17 00:00:00 2001 From: David Thomas Date: Thu, 24 Oct 2019 15:30:14 -0600 Subject: [PATCH 11/25] fixed ph5utils declaration of run_geod --- ph5/core/ph5utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 59407c73..6091b30f 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -123,7 +123,7 @@ def lat_long(self, easting, northing): class Geodesics: # added 2019-10-21 dthomas, consolidating from other locs - def run_geod(lat0, lon0, lat1, lon1, scalar=1.0): + def run_geod(self, lat0, lon0, lat1, lon1, scalar=1.0): ELLIPSOID = 'WGS84' config = "+ellps={0}".format(ELLIPSOID) From e3b18fe1150e15d1270e5c4e16b9d1886f9f593b Mon Sep 17 00:00:00 2001 From: David Thomas Date: Fri, 25 Oct 2019 12:42:25 -0600 Subject: [PATCH 12/25] refactored utm tests --- ph5/utilities/tests/test_utmtolatlong.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index b69149bd..1b9686cf 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -8,23 +8,27 @@ class TestUTMconversion(unittest.TestCase): def test_is_valid_utm_conversion(self): u = UTMConversions() # PIC Socorro - self.assertAlmostEqual(u.lat_long(322916, 3771967, 13, 'N'), - 
(34.07349577107704, -106.91909595147378)) + lat, lon = u.lat_long(322916, 3771967, 13, 'N') + self.assertAlmostEqual(lat, 34.07349577107704) + self.assertAlmostEqual(lon, -106.91909595147378) def test_is_valid_utm_conversion_south(self): u = UTMConversions() # Castle Rock Antarctica - self.assertAlmostEqual(u.lat_long(540947, 1361594, 58, 'S'), - (-77.81567398301094, 166.73816638798527)) + lat, lon = u.lat_long(540947, 1361594, 58, 'S') + self.assertAlmostEqual(lat, -77.81567398301094) + self.assertAlmostEqual(lon, 166.73816638798527) def test_is_valid_utm_conversion_inverse(self): u = UTMConversions() # PIC Socorro - self.assertAlmostEqual(u.geod2utm(34.0734958, -106.9190959, 1456.0), - (3771967.003118457, 322916.0048106084, 1456.0)) + northing, easting, elev = u.geod2utm(34.0734958, -106.9190959, 1456.0) + self.assertAlmostEqual(northing, 3771967.003118457) + self.assertAlmostEqual(easting, 322916.0048106084) def test_is_valid_tsp_conversion(self): t = TSPConversions() # Sweetwater, Texas, units US FEET - self.assertAlmostEqual(t.lat_long(1380811, 6858888), - (-100.40568335900281, 32.468972700219375)) + lon, lat = t.lat_long(1380811, 6858888) + self.assertAlmostEqual(lon, -100.40568335900281) + self.assertAlmostEqual(lat, 32.468972700219375) def test_for_correct_type(self): with self.assertRaises(ValueError): From bfc83fd7b8699140c45da503c50b1a448f75387c Mon Sep 17 00:00:00 2001 From: David Thomas Date: Tue, 17 Dec 2019 15:21:42 -0700 Subject: [PATCH 13/25] Latest updates --- ph5/core/ph5utils.py | 23 +++- ph5/entry_points.py | 10 +- ph5/utilities/tests/test_utmtolatlong.py | 17 ++- ph5/utilities/utm_latlong.py | 154 +++++++++++++++++++++++ ph5/utilities/utmtolatlong.py | 86 ------------- 5 files changed, 192 insertions(+), 98 deletions(-) create mode 100644 ph5/utilities/utm_latlong.py delete mode 100644 ph5/utilities/utmtolatlong.py diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 6091b30f..6dc9a05b 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -18,7 +18,7 @@ from pyproj import Transformer, Geod import math -PROG_VERSION = "2019.275" +PROG_VERSION = "2019.330" class PH5Response(object): @@ -72,7 +72,7 @@ def get_n_i(self, sensor_keys, datalogger_keys): class UTMConversions: # updated 2019-09-27 dthomas - def lat_long(self, easting, northing, zone, hemisphere): + def utm2latlong(self, easting, northing, zone, hemisphere): # utm to lat/long conversion epsg_wgs84 = "EPSG:4326" side = hemisphere[0].upper() @@ -94,7 +94,7 @@ def lon2zone(self, lon): zone = int(math.floor((lon + 180.)/6.0) % 60) + 1 return zone - def geod2utm(self, lat, lon, elev): + def geod2utm(self, lat, lon, elev): # utility function # lat/long to UTM conversion zone = self.lon2zone(lon) epsg_wgs84 = "EPSG:4326" @@ -109,6 +109,23 @@ def geod2utm(self, lat, lon, elev): easting, northing = transformer.transform(lon, lat) return (northing, easting, elev) # e.g. 
Y, X, Z + def latlong2utm(self, lat, lon): + # lat/long to UTM conversion + zone = self.lon2zone(float(lon)) + epsg_wgs84 = "EPSG:4326" + if lat < 0.0: + epsgroot = "327" + hemisphere = 'S' + elif lat >= 0.0: + epsgroot = "326" + hemisphere = 'N' + epsg_utm = "EPSG:" + epsgroot + str(zone) + + transformer = Transformer.from_crs(epsg_wgs84, epsg_utm, + always_xy=True) + easting, northing = transformer.transform(lon, lat) + return (easting, northing, zone, hemisphere) + class TSPConversions: # added 2019-09-30 dthomas, Texas State Plane Coords diff --git a/ph5/entry_points.py b/ph5/entry_points.py index 72a28b3e..07048562 100644 --- a/ph5/entry_points.py +++ b/ph5/entry_points.py @@ -5,7 +5,7 @@ # Dave Thomas, 2019-08-06 -PROG_VERSION = '2019.239' +PROG_VERSION = '2019.330' class EntryPointTypes(): @@ -277,10 +277,10 @@ def __init__(self): 'Takes PH5 files and returns time series ' 'availability info.', type=EntryPointTypes.CLIENT), - EntryPoint('utmtolatlong', - 'ph5.utilities.utmtolatlong:main', - 'Converts UTM coordinates to Latitudes' - ' & Longitudes.', + EntryPoint('utm_latlong', + 'ph5.utilities.utm_latlong:main', + 'Converts UTM coordinates to/from' + ' Latitudes & Longitudes.', type=EntryPointTypes.INGESTION), ] } diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index 1b9686cf..e39d420a 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -8,13 +8,13 @@ class TestUTMconversion(unittest.TestCase): def test_is_valid_utm_conversion(self): u = UTMConversions() # PIC Socorro - lat, lon = u.lat_long(322916, 3771967, 13, 'N') + lat, lon = u.utm2latlong(322916, 3771967, 13, 'N') self.assertAlmostEqual(lat, 34.07349577107704) self.assertAlmostEqual(lon, -106.91909595147378) def test_is_valid_utm_conversion_south(self): u = UTMConversions() # Castle Rock Antarctica - lat, lon = u.lat_long(540947, 1361594, 58, 'S') + lat, lon = u.utm2latlong(540947, 1361594, 58, 'S') self.assertAlmostEqual(lat, -77.81567398301094) self.assertAlmostEqual(lon, 166.73816638798527) @@ -24,6 +24,15 @@ def test_is_valid_utm_conversion_inverse(self): self.assertAlmostEqual(northing, 3771967.003118457) self.assertAlmostEqual(easting, 322916.0048106084) + def test_is_valid_utm_conversion_fancyinverse(self): + u = UTMConversions() # PIC Socorro + easting, northing, zone, hemisphere = \ + u.latlong2utm(34.0734958, -106.9190959) + self.assertAlmostEqual(easting, 322916.0048106084) + self.assertAlmostEqual(northing, 3771967.003118457) + self.assertEqual(zone, 13) + self.assertAlmostEqual(hemisphere, 'N') + def test_is_valid_tsp_conversion(self): t = TSPConversions() # Sweetwater, Texas, units US FEET lon, lat = t.lat_long(1380811, 6858888) @@ -33,12 +42,12 @@ def test_is_valid_tsp_conversion(self): def test_for_correct_type(self): with self.assertRaises(ValueError): u = UTMConversions() # 'heck' is not a float - u.lat_long('heck', 'no', 58, 'S') + u.utm2latlong('heck', 'no', 58, 'S') def test_for_correct_value(self): # 666 is invalid UTM zone with self.assertRaises(pyproj.exceptions.CRSError): u = UTMConversions() - u.lat_long(540947, 1361594, 666, 'S') + u.utm2latlong(540947, 1361594, 666, 'S') if __name__ == "__main__": diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py new file mode 100644 index 00000000..88270261 --- /dev/null +++ b/ph5/utilities/utm_latlong.py @@ -0,0 +1,154 @@ +# This script is used to convert UTM coordinates to/from Latitude/Longitude +# Dave Thomas, 2019-11-26 + +from __future__ 
import (print_function) +from ph5.core import ph5utils +import argparse +import logging + +PROG_VERSION = '2019.330' +LOGGER = logging.getLogger(__name__) + + +def convert_file_from_utm(infile, outfile): # batch file method + counter = 0 + with open(outfile, "w") as theoutfile, open(infile) as theinfile: + ws = "Easting, Northing, Zone, Hemisphere, => Longitude, Latitude\n" + theoutfile.write(ws) + for line in theinfile: + counter = counter + 1 + if counter == 1: + continue # skip header line + fieldset = line.strip().split(",") + if len(fieldset) <= 1: + continue + easting = float(fieldset[0]) + northing = float(fieldset[1]) + zone = int(fieldset[2]) + hemisphere = str(fieldset[3]).upper() + try: + utm = ph5utils.UTMConversions() + lat, lon = utm.utm2latlong(easting, northing, zone, hemisphere) + ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" + printstr1 = (ps % (easting, northing, zone, hemisphere)) + printstr2 = "Lon = %.7f, Lat = %.7f" % (lon, lat) + print (printstr1 + " => " + printstr2) + outstr = "%.1f, %.1f, %d, %s, %.7f, %.7f" \ + % (easting, northing, zone, hemisphere, lon, lat) + theoutfile.write(outstr + "\n") + except ValueError: + print ("There was an error in UTM file conversion.") + return + + +def convert_file_from_latlong(infile, outfile): # batch file method + counter = 0 + with open(outfile, "w") as theoutfile, open(infile) as theinfile: + ws = "Longitude, Latitude, =>Easting, Northing, Zone, Hemisphere\n" + theoutfile.write(ws) + for line in theinfile: + counter = counter + 1 + if counter == 1: + continue # skip header line + fieldset = line.strip().split(",") + if len(fieldset) <= 1: + continue + lat = float(fieldset[0]) + lon = float(fieldset[1]) + try: + utm = ph5utils.UTMConversions() + easting, northing, zone, hemisphere = utm.latlong2utm(lat, lon) + ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" + printstr1 = "Lon = %.7f, Lat = %.7f" % (lon, lat) + printstr2 = (ps % (easting, northing, zone, hemisphere)) + print (printstr1 + " => " + printstr2) + outstr = "%.7f, %.7f, %.1f, %.1f, %d, %s" \ + % (lon, lat, easting, northing, zone, hemisphere) + theoutfile.write(outstr + "\n") + except ValueError: + print ("There was an error in UTM file conversion.") + return + + +def doinline_from_utm(easting, northing, zone, hemisphere): # interactive + print ("easting=%s, northing=%s, zone=%s, hemisphere=%s" + % (easting, northing, zone, hemisphere)) + try: + utm = ph5utils.UTMConversions() + indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" + % (float(easting), float(northing), int(zone), hemisphere)) + print (indat) + lat, lon = utm.utm2latlong(float(easting), float(northing), + int(zone), hemisphere) + outstr = "Lon = %.7f, Lat = %.7f" % (lon, lat) + print (outstr) + print (''.join('*'*50)) + except ValueError: + print ("There was an error in UTM data conversion.") + return + + +def doinline_from_latlong(lon, lat): # interactive + try: + utm = ph5utils.UTMConversions() + indat = "Input: lon=%.7f, lat=%.7f" % (float(lon), float(lat)) + print (indat) + easting, northing, zone, hemisphere = utm.latlong2utm(lat, lon) + outstr = "easting=%.1f, northing=%.1f, zone=%s, hemisphere=%s" \ + % (float(easting), float(northing), zone, hemisphere) + print (outstr) + print (''.join('*'*50)) + except ValueError: + print ("There was an error in UTM data conversion.") + return + + +def main(): + desc = 'Convert UTM locations to/from longitudes, latitudes, \ +in batches (-i, -o) or inline (-e,-n,-z,-s), (-x,-y)' + parser = 
argparse.ArgumentParser(description=desc) + group1 = parser.add_mutually_exclusive_group() + group1.add_argument("-u", "--utm", help="Convert UTM to Lat/Long", + action="store_true") + group1.add_argument("-l", "--latlong", help="Convert Lat/Long to UTM", + action="store_true") + parser.add_argument("-i", "--infile", help="Input file with UTM \ +Easting, Northing, Zone, Hemisphere, or Lats and Longs") + parser.add_argument("-o", "--outfile", help="Output file with \ +converted longitudes, latitudes or UTM Coordinates") + parser.add_argument("-e", "--easting", help="Enter UTM Easting") + parser.add_argument("-n", "--northing", help="Enter UTM Northing") + parser.add_argument("-z", "--zone", help="Enter UTM Zone") + parser.add_argument("-s", "--side", help="Side=Hemisphere, N or S") + parser.add_argument("-x", "--longitude", help="Enter longitude") + parser.add_argument("-y", "--latitude", help="Enter latitude") + + args = parser.parse_args() + + if args.infile is not None and args.outfile is not None: + if args.utm == 1: + convert_file_from_utm(args.infile, args.outfile) + elif args.latlong == 1: + convert_file_from_latlong(args.infile, args.outfile) + else: + print ("Error, you must specify either -u/--utm, OR -l/--latlong") + + else: # direct method + if args.utm == 1: + if args.easting is not None and args.northing is not None \ + and args.zone is not None and args.side is not None: + doinline_from_utm(args.easting, args.northing, + args.zone, args.side) + else: + print ("Error-you must specify easting, northing, zone, side") + elif args.latlong == 1: + if args.longitude is not None and args.latitude is not None: + doinline_from_latlong(args.longitude, args.latitude) + else: + print ("Error, you must specify longitude and latitude.") + else: + print ("Error, you must choose -u/--utm, OR -l/--latlong") + + +if __name__ == '__main__': + main() diff --git a/ph5/utilities/utmtolatlong.py b/ph5/utilities/utmtolatlong.py deleted file mode 100644 index f442b568..00000000 --- a/ph5/utilities/utmtolatlong.py +++ /dev/null @@ -1,86 +0,0 @@ -# This script can be used to convert UTM coordinates to Latitude/Longitude -# Dave Thomas, 2019-08-01 - -from __future__ import (print_function) -from ph5.core import ph5utils -import argparse -import logging - -PROG_VERSION = '2019.239' -LOGGER = logging.getLogger(__name__) - - -def convert_file(infile, outfile): - counter = 0 - with open(outfile, "w") as theoutfile, open(infile) as theinfile: - ws = "Easting, Northing, Zone, Hemisphere, => Longitude, Latitude\n" - theoutfile.write(ws) - for line in theinfile: - counter = counter + 1 - if counter == 1: - continue # skip header line - fieldset = line.strip().split(",") - if len(fieldset) <= 1: - continue - easting = float(fieldset[0]) - northing = float(fieldset[1]) - zone = int(fieldset[2]) - hemisphere = str(fieldset[3]).upper() - try: - utm = ph5utils.UTMConversions() - lat, lon = utm.lat_long(easting, northing, zone, hemisphere) - ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" - printstr1 = (ps % (easting, northing, zone, hemisphere)) - printstr2 = "Lon = %.7f, Lat = %.7f" % (lon, lat) - print (printstr1 + " => " + printstr2) - outstr = "%.1f, %.1f, %d, %s, %.7f, %.7f" \ - % (easting, northing, zone, hemisphere, lon, lat) - theoutfile.write(outstr + "\n") - except ValueError: - print ("There was an error in UTM file conversion.") - return - - -def doinline(easting, northing, zone, hemisphere): - print ("easting=%s, northing=%s, zone=%s, hemisphere=%s" - % (easting, northing, zone, hemisphere)) - 
try: - utm = ph5utils.UTMConversions() - indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" - % (float(easting), float(northing), int(zone), hemisphere)) - print (indat) - lat, lon = utm.lat_long(float(easting), float(northing), - int(zone), hemisphere) - outstr = "Lon = %.7f, Lat = %.7f" % (lon, lat) - print (outstr) - print (''.join('*'*50)) - except ValueError: - print ("There was an error in UTM data conversion.") - return - - -def main(): - desc = 'Convert UTM locations to longitudes, latitudes, \ -in batches (-i, -o) or inline (-e,-n,-z,-s)' - parser = argparse.ArgumentParser(description=desc) - group = parser.add_mutually_exclusive_group() - group.add_argument("-i", "--infile", help="Input file with UTM \ -Easting, Northing, Zone, Hemisphere") - parser.add_argument("-o", "--outfile", help="Output file with \ -converted longitudes, latitudes") - group.add_argument("-e", "--easting", help="Enter UTM Easting") - parser.add_argument("-n", "--northing", help="Enter UTM Northing") - parser.add_argument("-z", "--zone", help="Enter UTM Zone") - parser.add_argument("-s", "--side", help="Side=Hemisphere, N or S") - - args = parser.parse_args() - - if args.infile is not None and args.outfile is not None: - convert_file(args.infile, args.outfile) - - else: # direct method - doinline(args.easting, args.northing, args.zone, args.side) - - -if __name__ == '__main__': - main() From 0d518918f9b285bc6ec9b7ed790c4b3aa1438c51 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Mon, 23 Dec 2019 10:10:50 -0700 Subject: [PATCH 14/25] Fix'd segd2ph5.py, got it working. --- ph5/utilities/segd2ph5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index b8eba88f..679f6b2e 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -1150,7 +1150,7 @@ def print_container(container): if NS != 'N' and NS != 'S': NS = 'N' - LAT, LON = utmc.lat_long( + LAT, LON = utmc.utm2latlong( SD.trace_headers.trace_header_N[ 4].receiver_point_X_final / 10., SD.trace_headers.trace_header_N[ From d3a9664179d4268163f24181398f5d3767a7a37c Mon Sep 17 00:00:00 2001 From: David Thomas Date: Fri, 17 Jan 2020 14:51:02 -0700 Subject: [PATCH 15/25] consolidated map functions --- ph5/core/ph5api.py | 7 +- ph5/core/ph5utils.py | 112 +++++++++++------------ ph5/core/segyfactory.py | 33 ++++--- ph5/utilities/novenqc.py | 7 +- ph5/utilities/segd2ph5.py | 20 ++-- ph5/utilities/tests/test_utmtolatlong.py | 26 +++--- ph5/utilities/utm_latlong.py | 15 ++- 7 files changed, 104 insertions(+), 116 deletions(-) diff --git a/ph5/core/ph5api.py b/ph5/core/ph5api.py index 73a6579a..dbb017cf 100755 --- a/ph5/core/ph5api.py +++ b/ph5/core/ph5api.py @@ -14,7 +14,7 @@ from ph5.core import columns, experiment, timedoy, ph5utils from tables.exceptions import NoSuchNodeError -PROG_VERSION = '2019.93' +PROG_VERSION = '2020.017' LOGGER = logging.getLogger(__name__) PH5VERSION = columns.PH5VERSION @@ -328,7 +328,6 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): LOGGER.warning("Couldn't get offset.") return {} try: - geod = ph5utils.Geodesics() UNITS = 'm' if sta_line in self.Array_t and evt_line in self.Event_t: array_t = self.Array_t[sta_line]['byid'][sta_id][c] @@ -337,8 +336,8 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): lat0 = array_t[0]['location/Y/value_d'] lon1 = event_t['location/X/value_d'] lat1 = event_t['location/Y/value_d'] - az, baz, dist = geod.run_geod(lat0, lon0, lat1, lon1, - FACTS_M[UNITS]) + az, baz, dist = 
ph5utils.run_geod(lat0, lon0, lat1, lon1, + FACTS_M[UNITS]) except Exception as e: LOGGER.warning("Couldn't get offset. {0}".format(repr(e))) return {} diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 6dc9a05b..a5328451 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -18,7 +18,7 @@ from pyproj import Transformer, Geod import math -PROG_VERSION = "2019.330" +PROG_VERSION = "2020.017" class PH5Response(object): @@ -70,90 +70,80 @@ def get_n_i(self, sensor_keys, datalogger_keys): return ph5_resp.n_i -class UTMConversions: # updated 2019-09-27 dthomas +class UTMConversions: - def utm2latlong(self, easting, northing, zone, hemisphere): - # utm to lat/long conversion - epsg_wgs84 = "EPSG:4326" - side = hemisphere[0].upper() - if side == "S": - epsgroot = "327" - elif side == "N": - epsgroot = "326" - else: - epsgroot = None # will throw error - epsg_utm = "EPSG:" + epsgroot + str(zone) + def __init__(self, lat, lon, side, zone): + self.epsg_wgs84 = "EPSG:4326" - transformer = Transformer.from_crs(epsg_utm, - epsg_wgs84, always_xy=True) - lon, lat = transformer.transform(float(easting), float(northing)) - return (lat, lon) + if side is not None and lat is None: + if side == "S": + epsgroot = "327" + elif side == "N": + epsgroot = "326" + elif lat is not None: + if lat < 0.0: + epsgroot = "327" + side = 'S' + elif lat >= 0.0: + epsgroot = "326" + side = 'N' + + if zone is not None and lon is None: + self.epsg_utm = "EPSG:" + epsgroot + str(zone) + elif lon is not None: + zone = self.lon2zone(lon) + self.epsg_utm = "EPSG:" + epsgroot + str(zone) + + self.utm_zone = zone + self.hemisphere = side def lon2zone(self, lon): ''' Get UTM zone from longitude ''' - zone = int(math.floor((lon + 180.)/6.0) % 60) + 1 + zone = int(math.floor((float(lon) + 180.)/6.0) % 60) + 1 return zone + def utm2latlong(self, easting, northing): + transformer = Transformer.from_crs(self.epsg_utm, self.epsg_wgs84, + always_xy=True) + lon, lat = transformer.transform(float(easting), float(northing)) + return (lat, lon) + def geod2utm(self, lat, lon, elev): # utility function - # lat/long to UTM conversion - zone = self.lon2zone(lon) - epsg_wgs84 = "EPSG:4326" - if lat < 0.0: - epsgroot = "327" - elif lat >= 0.0: - epsgroot = "326" - epsg_utm = "EPSG:" + epsgroot + str(zone) - - transformer = Transformer.from_crs(epsg_wgs84, epsg_utm, + transformer = Transformer.from_crs(self.epsg_wgs84, self.epsg_utm, always_xy=True) easting, northing = transformer.transform(lon, lat) return (northing, easting, elev) # e.g. 
Y, X, Z def latlong2utm(self, lat, lon): - # lat/long to UTM conversion - zone = self.lon2zone(float(lon)) - epsg_wgs84 = "EPSG:4326" - if lat < 0.0: - epsgroot = "327" - hemisphere = 'S' - elif lat >= 0.0: - epsgroot = "326" - hemisphere = 'N' - epsg_utm = "EPSG:" + epsgroot + str(zone) - - transformer = Transformer.from_crs(epsg_wgs84, epsg_utm, + transformer = Transformer.from_crs(self.epsg_wgs84, self.epsg_utm, always_xy=True) easting, northing = transformer.transform(lon, lat) - return (easting, northing, zone, hemisphere) + return (easting, northing, self.utm_zone, self.hemisphere) -class TSPConversions: # added 2019-09-30 dthomas, Texas State Plane Coords - - def lat_long(self, easting, northing): - epsg_wgs84 = "EPSG:4326" - epsg_sp4202 = "EPSG:2276" # State Plane Coords in US FEET - transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, - always_xy=True) - lon, lat = transformer.transform(easting, northing) - return (lon, lat) - +def tspc_lat_long(self, easting, northing): # Texas State Plane Coords + epsg_wgs84 = "EPSG:4326" + epsg_sp4202 = "EPSG:2276" # State Plane Coords in US FEET + transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, + always_xy=True) + lon, lat = transformer.transform(easting, northing) + return (lon, lat) -class Geodesics: # added 2019-10-21 dthomas, consolidating from other locs - def run_geod(self, lat0, lon0, lat1, lon1, scalar=1.0): - ELLIPSOID = 'WGS84' +def run_geod(self, lat0, lon0, lat1, lon1, scalar=1.0): + ELLIPSOID = 'WGS84' - config = "+ellps={0}".format(ELLIPSOID) + config = "+ellps={0}".format(ELLIPSOID) - g = Geod(config) + g = Geod(config) - az, baz, dist = g.inv(lon0, lat0, lon1, lat1) + az, baz, dist = g.inv(lon0, lat0, lon1, lat1) - if dist: - dist /= scalar + if dist: + dist /= scalar - # Return list containing azimuth, back azimuth, distance - return az, baz, dist + # Return list containing azimuth, back azimuth, distance + return az, baz, dist """ diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index 964d6309..f2a0aad7 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -19,7 +19,7 @@ from ph5.core import ph5utils # for geod2utm, run_geod from ph5.core import segy_h, ebcdic -PROG_VERSION = '2018.268' +PROG_VERSION = '2020.015' LOGGER = logging.getLogger(__name__) os.environ['TZ'] = 'UTC' @@ -786,10 +786,11 @@ def set_trace_header(self): if self.utm is True: try: - utmc = ph5utils.UTMConversions() - Y, X, Z = utmc.geod2utm(self.array_t['location/Y/value_d'], - self.array_t['location/X/value_d'], - self.array_t['location/Z/value_d']) + lat = self.array_t['location/Y/value_d'] + lon = self.array_t['location/X/value_d'] + elev = self.array_t['location/Z/value_d'] + utmc = ph5utils.UTMConversions(lat, lon, None, None) + Y, X, Z = utmc.geod2utm(lat, lon, elev) s, vx, vy = pick_values_32(X, Y) tra['coordScale'] = s @@ -842,10 +843,11 @@ def set_trace_header(self): if self.utm: try: - utmc = ph5utils.UTMConversions() - Y, X, Z = utmc.geod2utm(self.event_t['location/Y/value_d'], - self.event_t['location/X/value_d'], - self.event_t['location/Z/value_d']) + lat = self.event_t['location/Y/value_d'] + lon = self.event_t['location/X/value_d'] + elev = self.event_t['location/Z/value_d'] + utmc = ph5utils.UTMConversions(lat, lon, None, None) + Y, X, Z = utmc.geod2utm(lat, lon, elev) s, vx, vy = pick_values_32(X, Y) tra['sourceLongOrX'] = vx @@ -870,13 +872,14 @@ def set_trace_header(self): tra['sourceToRecDist'] = self.offset_t['offset/value_d'] else: try: - geod = ph5utils.Geodesics() + lat1 = 
self.event_t['location/Y/value_d'] + lon1 = self.event_t['location/X/value_d'] + lat2 = self.array_t['location/Y/value_d'] + lon2 = self.array_t['location/X/value_d'] + UNITS = 'm' - az_baz_dist = geod.run_geod(self.event_t['location/Y/value_d'], - self.event_t['location/X/value_d'], - self.array_t['location/Y/value_d'], - self.array_t['location/X/value_d'], - FACTS[UNITS]) + az_baz_dist = ph5utils.run_geod(lat1, lon1, lat2, lon2, + FACTS[UNITS]) tra['sourceToRecDist'] = az_baz_dist[2] except Exception as e: # sys.stderr.write (e.message) diff --git a/ph5/utilities/novenqc.py b/ph5/utilities/novenqc.py index 2657f511..dea550ff 100644 --- a/ph5/utilities/novenqc.py +++ b/ph5/utilities/novenqc.py @@ -17,7 +17,7 @@ from ph5.core import timedoy -PROG_VERSION = '2019.14' +PROG_VERSION = '2020.015' LOGGER = logging.getLogger(__name__) FACTS = {'km': 1000., 'm': 1., 'dm': 1. / 10., 'cm': 1. / 100., @@ -215,10 +215,9 @@ def qc_dist(ys, xs, zs): # # Need to check for UTM and convert to lat/lon # - geod = ph5utils.Geodesics() units = 'm' - az, baz, dist = geod.run_geod(ys[0], xs[0], ys[1], xs[1], - FACTS[units]) + az, baz, dist = ph5utils.run_geod(ys[0], xs[0], ys[1], xs[1], + FACTS[units]) if len(zs) > 1: zdelta = float(zs[1]) - float(zs[0]) else: diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 679f6b2e..6cb6c7c9 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -19,7 +19,7 @@ from ph5 import LOGGING_FORMAT -PROG_VERSION = "2019.252" +PROG_VERSION = "2020.017" logger = logging.getLogger(__name__) MAX_PH5_BYTES = 1073741824 * 100. # 100 GB (1024 X 1024 X 1024 X 2) @@ -1142,24 +1142,22 @@ def print_container(container): try: if UTM: # UTM - utmc = ph5utils.UTMConversions() new_UTM = re.split(r'(\d+)', UTM) utmzone = str(new_UTM[1]) NS = str(new_UTM[2]).upper() if NS != 'N' and NS != 'S': NS = 'N' - - LAT, LON = utmc.utm2latlong( - SD.trace_headers.trace_header_N[ - 4].receiver_point_X_final / 10., - SD.trace_headers.trace_header_N[ - 4].receiver_point_Y_final / 10., - utmzone, NS) + easting = SD.trace_headers.trace_header_N[ + 4].receiver_point_X_final / 10. + northing = SD.trace_headers.trace_header_N[ + 4].receiver_point_Y_final / 10. 
+ utmc = ph5utils.UTMConversions(None, None, + NS, utmzone) + LAT, LON = utmc.utm2latlong(easting, northing) elif TSPF: # Texas State Plane coordinates - tspc = ph5utils.TSPConversions() - LAT, LON = tspc.lat_long( + LAT, LON = ph5utils.tspc_lat_long( SD.trace_headers.trace_header_N[ 4].receiver_point_X_final / 10., SD.trace_headers.trace_header_N[ diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index e39d420a..ecafafa9 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -1,31 +1,31 @@ import unittest import pyproj from ph5.core.ph5utils import UTMConversions -from ph5.core.ph5utils import TSPConversions +from ph5.core.ph5utils import tspc_lat_long class TestUTMconversion(unittest.TestCase): def test_is_valid_utm_conversion(self): - u = UTMConversions() # PIC Socorro - lat, lon = u.utm2latlong(322916, 3771967, 13, 'N') + u = UTMConversions(None, None, 'N', 13) # PIC Socorro + lat, lon = u.utm2latlong(322916, 3771967) self.assertAlmostEqual(lat, 34.07349577107704) self.assertAlmostEqual(lon, -106.91909595147378) def test_is_valid_utm_conversion_south(self): - u = UTMConversions() # Castle Rock Antarctica - lat, lon = u.utm2latlong(540947, 1361594, 58, 'S') + u = UTMConversions(None, None, 'S', 58) # Castle Rock Antarctica + lat, lon = u.utm2latlong(540947, 1361594) self.assertAlmostEqual(lat, -77.81567398301094) self.assertAlmostEqual(lon, 166.73816638798527) def test_is_valid_utm_conversion_inverse(self): - u = UTMConversions() # PIC Socorro + u = UTMConversions(34.0734958, -106.9190959, None, None) # PIC northing, easting, elev = u.geod2utm(34.0734958, -106.9190959, 1456.0) self.assertAlmostEqual(northing, 3771967.003118457) self.assertAlmostEqual(easting, 322916.0048106084) def test_is_valid_utm_conversion_fancyinverse(self): - u = UTMConversions() # PIC Socorro + u = UTMConversions(34.0734958, -106.9190959, None, None) # PIC easting, northing, zone, hemisphere = \ u.latlong2utm(34.0734958, -106.9190959) self.assertAlmostEqual(easting, 322916.0048106084) @@ -34,20 +34,20 @@ def test_is_valid_utm_conversion_fancyinverse(self): self.assertAlmostEqual(hemisphere, 'N') def test_is_valid_tsp_conversion(self): - t = TSPConversions() # Sweetwater, Texas, units US FEET - lon, lat = t.lat_long(1380811, 6858888) + # Sweetwater, Texas, units US FEET + lon, lat = tspc_lat_long(self, 1380811, 6858888) self.assertAlmostEqual(lon, -100.40568335900281) self.assertAlmostEqual(lat, 32.468972700219375) def test_for_correct_type(self): with self.assertRaises(ValueError): - u = UTMConversions() # 'heck' is not a float - u.utm2latlong('heck', 'no', 58, 'S') + u = UTMConversions('heck', 'no', 'S', 58) # 'heck' is not a float + u.utm2latlong('heck', 'no') def test_for_correct_value(self): # 666 is invalid UTM zone with self.assertRaises(pyproj.exceptions.CRSError): - u = UTMConversions() - u.utm2latlong(540947, 1361594, 666, 'S') + u = UTMConversions(None, None, 'S', 666) + u.utm2latlong(540947, 1361594) if __name__ == "__main__": diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py index 88270261..c99f9229 100644 --- a/ph5/utilities/utm_latlong.py +++ b/ph5/utilities/utm_latlong.py @@ -6,7 +6,7 @@ import argparse import logging -PROG_VERSION = '2019.330' +PROG_VERSION = '2020.015' LOGGER = logging.getLogger(__name__) @@ -27,8 +27,8 @@ def convert_file_from_utm(infile, outfile): # batch file method zone = int(fieldset[2]) hemisphere = str(fieldset[3]).upper() try: - utm = 
ph5utils.UTMConversions() - lat, lon = utm.utm2latlong(easting, northing, zone, hemisphere) + utm = ph5utils.UTMConversions(None, None, hemisphere, zone) + lat, lon = utm.utm2latlong(easting, northing) ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" printstr1 = (ps % (easting, northing, zone, hemisphere)) printstr2 = "Lon = %.7f, Lat = %.7f" % (lon, lat) @@ -56,7 +56,7 @@ def convert_file_from_latlong(infile, outfile): # batch file method lat = float(fieldset[0]) lon = float(fieldset[1]) try: - utm = ph5utils.UTMConversions() + utm = ph5utils.UTMConversions(lat, lon, None, None) easting, northing, zone, hemisphere = utm.latlong2utm(lat, lon) ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" printstr1 = "Lon = %.7f, Lat = %.7f" % (lon, lat) @@ -74,12 +74,11 @@ def doinline_from_utm(easting, northing, zone, hemisphere): # interactive print ("easting=%s, northing=%s, zone=%s, hemisphere=%s" % (easting, northing, zone, hemisphere)) try: - utm = ph5utils.UTMConversions() + utm = ph5utils.UTMConversions(None, None, hemisphere, zone) indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" % (float(easting), float(northing), int(zone), hemisphere)) print (indat) - lat, lon = utm.utm2latlong(float(easting), float(northing), - int(zone), hemisphere) + lat, lon = utm.utm2latlong(float(easting), float(northing)) outstr = "Lon = %.7f, Lat = %.7f" % (lon, lat) print (outstr) print (''.join('*'*50)) @@ -90,7 +89,7 @@ def doinline_from_utm(easting, northing, zone, hemisphere): # interactive def doinline_from_latlong(lon, lat): # interactive try: - utm = ph5utils.UTMConversions() + utm = ph5utils.UTMConversions(lat, lon, None, None) indat = "Input: lon=%.7f, lat=%.7f" % (float(lon), float(lat)) print (indat) easting, northing, zone, hemisphere = utm.latlong2utm(lat, lon) From b1c2ae8d1dc5a90cde7184fc2d63b2a3c612ffa6 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Fri, 17 Jan 2020 16:32:40 -0700 Subject: [PATCH 16/25] redo of segd2ph5.py --- ph5/utilities/segd2ph5.py | 214 ++++++++++++-------------------------- 1 file changed, 69 insertions(+), 145 deletions(-) diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 6cb6c7c9..ecf840d9 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -17,10 +17,10 @@ from math import modf from ph5.core import experiment, columns, segdreader, ph5utils from ph5 import LOGGING_FORMAT - +## from pyproj import Proj, transform PROG_VERSION = "2020.017" -logger = logging.getLogger(__name__) +LOGGER = logging.getLogger(__name__) MAX_PH5_BYTES = 1073741824 * 100. 
# 100 GB (1024 X 1024 X 1024 X 2) @@ -97,7 +97,6 @@ def update(self): self.lines, self.keys = self.t.read_responses() def match(self, bw, gain): - # print self.lines for l in self.lines: if l['bit_weight/value_d'] == bw and l['gain/value_i'] == gain: return l['n_i'] @@ -121,13 +120,12 @@ def read_infile(infile): global FILES def fn_sort(a, b): - # print os.path.basename (a), os.path.basename (b) return cmp(os.path.basename(a), os.path.basename(b)) try: fh = file(infile) except Exception: - logger.warning("Failed to open %s\n" % infile) + LOGGER.warning("Failed to open %s\n" % infile) return while True: @@ -229,17 +227,20 @@ def get_args(): else: raise Exception("No outfile (PH5) given.\n") + setLogger() + + +def setLogger(): + if LOGGER.handlers != []: + LOGGER.removeHandler(LOGGER.handlers[0]) + # Write log to file ch = logging.FileHandler("segd2ph5.log") ch.setLevel(logging.INFO) # Add formatter formatter = logging.Formatter(LOGGING_FORMAT) ch.setFormatter(formatter) - logger.addHandler(ch) - # Need to process in order: R309_674.1.0.rg16, 309 == line, - # 674 = receiver point, 1 = first file - # Sorted where the file list is read... - # FILES.sort () + LOGGER.addHandler(ch) def initializeExperiment(): @@ -285,7 +286,7 @@ def update_index_t_info(starttime, samples, sps): DAS_INFO[das].append(di) MAP_INFO[das].append(dm) - logger.info( + LOGGER.info( "DAS: {0} File: {1} First Sample: {2} Last Sample: {3}".format( das, ph5file, time.ctime(starttime), time.ctime(stoptime))) @@ -294,15 +295,13 @@ def update_external_references(): ''' Update external references in master.ph5 to miniPH5 files in Receivers_t ''' global F - logger.info("Updating external references...") + LOGGER.info("Updating external references...") n = 0 for i in INDEX_T_DAS.rows: external_file = i['external_file_name_s'][2:] external_path = i['hdf5_path_s'] - i['serial_number_s'] target = external_file + ':' + external_path external_group = external_path.split('/')[3] - # print external_file, external_path, das, target, external_group # Nuke old node try: @@ -310,7 +309,6 @@ def update_external_references(): group_node.remove() except Exception as e: pass - # print "DAS nuke ", e.message # Re-create node try: @@ -319,24 +317,16 @@ def update_external_references(): n += 1 except Exception as e: # pass - logger.error("{0}\n".format(e.message)) + LOGGER.error("{0}\n".format(e.message)) - logger.info("done, {0} das nodes recreated.\n".format(n)) + LOGGER.info("done, {0} das nodes recreated.\n".format(n)) n = 0 for i in INDEX_T_MAP.rows: - # XXX - # keys = i.keys () - # keys.sort () - # for k in keys : - # print k, i[k] - external_file = i['external_file_name_s'][2:] external_path = i['hdf5_path_s'] - i['serial_number_s'] target = external_file + ':' + external_path external_group = external_path.split('/')[3] - # print external_file, external_path, das, target, external_group # Nuke old node try: @@ -344,7 +334,6 @@ def update_external_references(): group_node.remove() except Exception as e: pass - # print "MAP nuke ", e.message # Re-create node try: @@ -353,9 +342,9 @@ def update_external_references(): n += 1 except Exception as e: # pass - logger.error("{0}\n".format(e.message)) + LOGGER.error("{0}\n".format(e.message)) - logger.info("done, {0} map nodes recreated.\n".format(n)) + LOGGER.info("done, {0} map nodes recreated.\n".format(n)) def get_current_data_only(size_of_data, das=None): @@ -363,8 +352,6 @@ def get_current_data_only(size_of_data, das=None): less than MAX_PH5_BYTES after raw data is added to it. 
''' - # global NM - # global INDEX_T, CURRENT_DAS def sstripp(s): s = s.replace('.ph5', '') s = s.replace('./', '') @@ -401,8 +388,6 @@ def smallest(): return openPH5('miniPH5_{0:05d}'.format(FIRST_MINI)) size_of_exrec = os.path.getsize(newestfile + '.ph5') - # print size_of_data, size_of_exrec, size_of_data + size_of_exrec, - # MAX_PH5_BYTES if NUM_MINI is not None: fm = FIRST_MINI - 1 if (int(newestfile[8:13]) - fm) < NUM_MINI: @@ -498,19 +483,17 @@ def process_das(): p_das_t['receiver_table_n_i'] = M[get_true_channel()] else: p_das_t['receiver_table_n_i'] = 0 # 0 -> Z - logger.warning( + LOGGER.warning( "Header channel set: {0}. Check Receiver_t entries!".format( th.trace_header.channel_set)) p_das_t['response_table_n_i'] = None p_das_t['time_table_n_i'] = 0 p_das_t['time/type_s'] = 'BOTH' - # trace_epoch = th.trace_header_N[2].gps_tim1 * 4294967296 +\ - # th.trace_header_N[2].gps_tim2 try: trace_epoch = th.trace_header_N[2].shot_epoch except Exception as e: - logger.warning("Failed to read shot epoch: {0}.".format(e.message)) + LOGGER.warning("Failed to read shot epoch: {0}.".format(e.message)) trace_epoch = 0. f, i = modf(trace_epoch / 1000000.) @@ -526,9 +509,6 @@ def process_das(): p_das_t['raw_file_name_s'] = os.path.basename(SD.name()) p_das_t['array_name_data_a'] = EXREC.ph5_g_receivers.nextarray( 'Data_a_') - # p_das_t['array_name_SOH_a'] = None - # p_das_t['array_name_event_a'] = None - # p_das_t['array_name_log_a'] = None p_response_t = {} ''' n_i @@ -549,7 +529,7 @@ def process_das(): try: p_response_t['gain/value_i'] = th.trace_header_N[3].preamp_gain_db except Exception as e: - logger.warning( + LOGGER.warning( "Failed to read trace pre amp gain: {0}.".format(e.message)) p_response_t['gain/value_i'] = 0. p_response_t['gain/units_s'] = 'Unknown' @@ -568,16 +548,13 @@ def process_das(): # Write trace data here try: # Convert to counts - # print tr.max (), tr.min () tr_counts = tr / LSB EXREC.ph5_g_receivers.newarray( p_das_t['array_name_data_a'], tr_counts, dtype='int32', description=des) except Exception as e: # Failed, leave as float - # for x in tr : print x/LSB - # print e.message - logger.warning( + LOGGER.warning( "Could not convert trace to counts. 
max: {1},\ min {2}\n{0}".format( e.message, tr.max(), tr.min())) @@ -585,7 +562,6 @@ def process_das(): EXREC.ph5_g_receivers.newarray( p_das_t['array_name_data_a'], tr, dtype='float32', description=des) - # update_index_t_info(p_das_t['time/epoch_l'] + ( float(p_das_t['time/micro_seconds_i']) / 1000000.), p_das_t['sample_count_i'], @@ -593,7 +569,6 @@ def process_das(): 'sample_rate_multiplier_i']) def process_array(): - # global DN p_array_t = {} def seen_sta(): @@ -601,9 +576,10 @@ def seen_sta(): return False elif Das not in ARRAY_T[line]: return False - elif chan_set in ARRAY_T[line][Das]: - # chans = ARRAY_T[line][Das].keys() # All channels seen - if not ARRAY_T[line][Das][chan_set]: + elif dtime not in ARRAY_T[line][Das]: + return False + elif chan_set in ARRAY_T[line][Das][dtime]: + if not ARRAY_T[line][Das][dtime][chan_set]: return False else: return True @@ -680,7 +656,6 @@ def seen_sta(): chan 1 -> Z ''' if SD.chan_sets_per_scan >= 3: - # true_chan = get_true_channel () OM = {1: '1', 2: '2', 3: 'Z'} elif SD.chan_sets_per_scan == 1: OM = {1: 'Z'} @@ -690,7 +665,6 @@ def seen_sta(): orientation_code = get_true_channel() else: orientation_code = OM[get_true_channel()] - # for cs in range (SD.chan_sets_per_scan) : p_array_t['seed_band_code_s'] = band_code p_array_t['seed_instrument_code_s'] = instrument_code p_array_t['seed_orientation_code_s'] = orientation_code @@ -701,7 +675,7 @@ def seen_sta(): try: f, i = modf(rh.extended_header_1.epoch_deploy / 1000000.) except Exception as e: - logger.warning( + LOGGER.warning( "Failed to read extended header 1 deploy epoch: {0}.".format( e.message)) f = i = 0. @@ -712,7 +686,7 @@ def seen_sta(): try: f, i = modf(rh.extended_header_1.epoch_pickup / 1000000.) except Exception as e: - logger.warning( + LOGGER.warning( "Failed to read extended header 1 pickup epoch: {0}.".format( e.message)) f = i = 0. @@ -723,13 +697,12 @@ def seen_sta(): p_array_t['das/manufacturer_s'] = 'FairfieldNodal' DM = {1: 'ZLAND 1C', 3: "ZLAND 3C"} try: - # p_array_t['das/model_s'] = DM[SD.chan_sets_per_scan] if SD.chan_sets_per_scan >= 3: p_array_t['das/model_s'] = DM[3] else: p_array_t['das/model_s'] = DM[1] except Exception as e: - logger.warning( + LOGGER.warning( "Failed to read channel sets per scan: {0}.".format(e.message)) p_array_t['das/model_s'] = 'zland-[13]C' p_array_t['das/serial_number_s'] = Das @@ -762,40 +735,39 @@ def seen_sta(): p_array_t['location/Z/value_d'] =\ th.trace_header_N[4].receiver_point_depth_final / 10. except Exception as e: - logger.warning( + LOGGER.warning( "Failed to read receiver point depth: {0}.".format(e.message)) p_array_t['location/Z/value_d'] = 0. 
p_array_t['channel_number_i'] = get_true_channel() - # p_array_t['description_s'] = str (th.trace_header_N[4].line_number) try: p_array_t['description_s'] = "DAS: {0}, Node ID: {1}".format( Das, rh.extended_header_1.id_number) except Exception as e: - logger.warning( + LOGGER.warning( "Failed to read extended header 1 ID number: {0}.".format( e.message)) try: line = th.trace_header_N[4].line_number except Exception as e: - logger.warning("Failed to read line number: {0}.".format( + LOGGER.warning("Failed to read line number: {0}.".format( e.message)) line = 0 chan_set = get_true_channel() + dtime = p_array_t['deploy_time/epoch_l'] if line not in ARRAY_T: ARRAY_T[line] = {} if Das not in ARRAY_T[line]: ARRAY_T[line][Das] = {} - if chan_set not in ARRAY_T[line][Das]: - ARRAY_T[line][Das][chan_set] = [] + if dtime not in ARRAY_T[line][Das]: + ARRAY_T[line][Das][dtime] = {} + if chan_set not in ARRAY_T[line][Das][dtime]: + ARRAY_T[line][Das][dtime][chan_set] = [] if not seen_sta(): - ARRAY_T[line][Das][chan_set].append(p_array_t) - # if rh.general_header_block_1.chan_sets_per_scan ==\ - # len (ARRAY_T[line].keys ()) : - # DN = True + ARRAY_T[line][Das][dtime][chan_set].append(p_array_t) def process_reel_headers(): global RH @@ -849,52 +821,40 @@ def process(hdr, header_type): TRACE_JSON.append(json.dumps( ll, sort_keys=True, indent=4).split('\n')) - # log_array, log_name = getLOG () - process(th.trace_header, "Trace Header") for i in range(len(th.trace_header_N)): ht = "Header N-{0}".format(i + 1) process(th.trace_header_N[i], ht) - # - # - # - # print "\tprocess das" - # for cs in range (rh.chan_sets_per_scan) : process_das() - # if not DN : - # print "\tprocess array" process_array() - # print "\tprocess headers" if not RH: process_reel_headers() - # print "\tprocess trace header" process_trace_header() def write_arrays(Array_t): ''' Write /Experiment_g/Sorts_g/Array_t_xxx ''' - def station_cmp(x, y): - return cmp(x['id_s'], y['id_s']) - lines = sorted(Array_t.keys()) # Loop through arrays/lines for line in lines: - # name = EX.ph5_g_sorts.nextName () name = "Array_t_{0:03d}".format(int(line)) a = EX.ph5_g_sorts.newArraySort(name) - stations = sorted(Array_t[line].keys()) - # Loop through stations - for station in stations: - chan_sets = sorted(Array_t[line][station].keys()) - # Loop through channel sets - for chan_set in chan_sets: - try: - for array_t in Array_t[line][station][chan_set]: - columns.populate(a, array_t) - except Exception as e: - print e.message + das_list = sorted(Array_t[line].keys()) + # Loop through das_list + for das in das_list: + dtimes = sorted(Array_t[line][das].keys()) + # Loop through deploying times + for dtime in dtimes: + chan_sets = sorted(Array_t[line][das][dtime].keys()) + # Loop through channel sets + for chan_set in chan_sets: + try: + for array_t in Array_t[line][das][dtime][chan_set]: + columns.populate(a, array_t) + except Exception as e: + print e.message def writeINDEX(): @@ -962,13 +922,6 @@ def writeINDEX(): MAP_INFO = {} -def correct_append_number(): - # from math import modf - traces = SD.reel_headers.extended_header_2['number_records'] - x = traces % APPEND - APPEND - x - - def main(): import time then = time.time() @@ -1018,22 +971,15 @@ def get_node(sd): return pn, id, nc - def print_container(container): - keys = container.keys() - for k in keys: - print k, container[k] - - print '-' * 80 - try: get_args() except Exception, err_msg: - logger.error(err_msg) + LOGGER.error(err_msg) return 1 initializeExperiment() - logger.info("segd2ph5 
{0}".format(PROG_VERSION)) - logger.info("{0}".format(sys.argv)) + LOGGER.info("segd2ph5 {0}".format(PROG_VERSION)) + LOGGER.info("{0}".format(sys.argv)) if len(FILES) > 0: RESP = Resp(EX.ph5_g_responses) rows, keys = EX.ph5_g_receivers.read_index() @@ -1048,61 +994,48 @@ def print_container(container): try: SIZE = os.path.getsize(f) except Exception as e: - logger.error("Failed to read {0}, {1}.\ + LOGGER.error("Failed to read {0}, {1}.\ Skipping...\n".format(f, str(e.message))) continue SD = segdreader.Reader(infile=f) LAT = None LON = None - # DN = False; RH = False - # print "isSEGD" if not SD.isSEGD(expected_manufactures_code=MANUFACTURERS_CODE): - logger.error( + LOGGER.error( "{0} is not a Fairfield SEG-D file. Skipping.".format( SD.name())) continue try: - # print "general headers" SD.process_general_headers() - # print "channel sets" SD.process_channel_set_descriptors() - # print "extended headers" SD.process_extended_headers() - # print "external headers" SD.process_external_headers() except segdreader.InputsError as e: - logger.error( + LOGGER.error( "Possible bad SEG-D file -- {0}".format( "".join(e.message))) continue - # Das = (SD.reel_headers.extended_header_3.line_number * 1000) +\ - # SD.reel_headers.extended_header_3.receiver_point - # APPEND = correct_append_number () nleft = APPEND Das = get_das(SD) part_number, node_id, number_of_channels = get_node(SD) - # EXREC = get_current_data_only(SIZE, Das) - logger.info(":: {0}\n".format(SD.name())) - logger.info( + LOGGER.info(":: {0}\n".format(SD.name())) + LOGGER.info( "Processing: {0}... Size: {1}\n".format(SD.name(), SIZE)) if EXREC.filename != MINIPH5: - logger.info("Opened: {0}...\n".format(EXREC.filename)) - logger.info( + LOGGER.info("Opened: {0}...\n".format(EXREC.filename)) + LOGGER.info( "DAS: {0}, Node ID: {1}, PN: {2}, Channels: {3}".format( Das, node_id, part_number, number_of_channels)) MINIPH5 = EXREC.filename n = 0 trace_headers_list = [] - # lat = None - # lon = None while True: - # if SD.isEOF(): if n != 0: thl = [] @@ -1123,8 +1056,6 @@ def print_container(container): traces = new_traces process_traces(SD.reel_headers, thl[0], t) - # process_traces (SD.reel_headers,\ - # trace_headers_list[0], trace) if DAS_INFO: writeINDEX() break @@ -1132,8 +1063,8 @@ def print_container(container): try: trace, cs = SD.process_trace() except segdreader.InputsError as e: - logger.error("{0}\n".format(F)) - logger.error( + LOGGER.error("{0}\n".format(F)) + LOGGER.error( "Possible bad SEG-D file -- {0}".format( "".join(e.message))) break @@ -1142,7 +1073,7 @@ def print_container(container): try: if UTM: # UTM - new_UTM = re.split(r'(\d+)', UTM) + new_UTM = re.split(r'(\d+)', UTM) utmzone = str(new_UTM[1]) NS = str(new_UTM[2]).upper() @@ -1168,23 +1099,17 @@ def print_container(container): LON = SD.trace_headers.trace_header_N[ 4].receiver_point_X_final / 10. 
except Exception as e: - logger.warning( + LOGGER.warning( "Failed to convert location: {0}.\n".format( e.message)) trace_headers_list.append(SD.trace_headers) - # for cs in range (SD.chan_sets_per_scan) : if n == 0: traces.append(Trace(trace, SD.trace_headers)) n = 1 - # Node kludge - # Das = (SD.trace_headers.trace_header_N[0]\ - # .receiver_line * 1000) + SD.trace_headers.\ - # trace_header_N[0].receiver_point Das = get_das(SD) else: traces.append(Trace(trace, SD.trace_headers)) - # traces = npappend (traces, trace) if n >= nleft or EVERY is True: thl = [] @@ -1198,12 +1123,10 @@ def print_container(container): if chan_set is None: chan_set = T.headers.trace_header.channel_set if chan_set == T.headers.trace_header.channel_set: - # print type (t) if isinstance(t, type(None)): t = T.trace else: t = npappend(t, T.trace) - # print len (t), t.min (), t.max () else: new_traces.append(T) if chan_set_next is None: @@ -1232,7 +1155,7 @@ def print_container(container): for line in TRACE_JSON: log_array.append(line) - logger.info(":: {0}\n".format(F)) + LOGGER.info(":: {0}\n".format(F)) write_arrays(ARRAY_T) seconds = time.time() - then @@ -1241,9 +1164,9 @@ def print_container(container): EX.ph5close() EXREC.ph5close() except Exception as e: - logger.warning("{0}\n".format("".join(e.message))) + LOGGER.warning("{0}\n".format("".join(e.message))) - logger.info("Done...{0:b}".format(int(seconds / 6.))) + LOGGER.info("Done...{0:b}".format(int(seconds / 6.))) logging.shutdown() prof() @@ -1251,3 +1174,4 @@ def print_container(container): if __name__ == '__main__': main() + From 984e95bdee53196880d935d326d3d5b0962cab5b Mon Sep 17 00:00:00 2001 From: David Thomas Date: Fri, 17 Jan 2020 17:13:07 -0700 Subject: [PATCH 17/25] redoing segd2ph5.py again --- ph5/utilities/segd2ph5.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index ecf840d9..9b30f86a 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -17,7 +17,6 @@ from math import modf from ph5.core import experiment, columns, segdreader, ph5utils from ph5 import LOGGING_FORMAT -## from pyproj import Proj, transform PROG_VERSION = "2020.017" LOGGER = logging.getLogger(__name__) @@ -1073,7 +1072,7 @@ def get_node(sd): try: if UTM: # UTM - new_UTM = re.split(r'(\d+)', UTM) + new_UTM = re.split(r'(\d+)', UTM) utmzone = str(new_UTM[1]) NS = str(new_UTM[2]).upper() @@ -1174,4 +1173,3 @@ def get_node(sd): if __name__ == '__main__': main() - From ddaf75919aa553ae5414c5607bb36d771ed12481 Mon Sep 17 00:00:00 2001 From: dsentinel Date: Tue, 28 Jan 2020 17:41:34 -0700 Subject: [PATCH 18/25] flake, remove extra line --- ph5/utilities/segd2ph5.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index bbf7b07c..de39c9df 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -923,7 +923,6 @@ def writeINDEX(): MAP_INFO = {} - def main(): import time then = time.time() From 9c77f1387f217824db6c1fa5740b611f929e7534 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 29 Jan 2020 17:26:22 -0700 Subject: [PATCH 19/25] Fixed run_geod call syntax, good for merge (hopefully) --- ph5/core/ph5utils.py | 4 ++-- ph5/utilities/tests/test_utmtolatlong.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index a5328451..01853f12 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -121,7 +121,7 @@ def latlong2utm(self, lat, lon): return 
(easting, northing, self.utm_zone, self.hemisphere) -def tspc_lat_long(self, easting, northing): # Texas State Plane Coords +def tspc_lat_long(easting, northing): # Texas State Plane Coords epsg_wgs84 = "EPSG:4326" epsg_sp4202 = "EPSG:2276" # State Plane Coords in US FEET transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, @@ -130,7 +130,7 @@ def tspc_lat_long(self, easting, northing): # Texas State Plane Coords return (lon, lat) -def run_geod(self, lat0, lon0, lat1, lon1, scalar=1.0): +def run_geod(lat0, lon0, lat1, lon1, scalar=1.0): ELLIPSOID = 'WGS84' config = "+ellps={0}".format(ELLIPSOID) diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index ecafafa9..95e747dd 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -35,7 +35,7 @@ def test_is_valid_utm_conversion_fancyinverse(self): def test_is_valid_tsp_conversion(self): # Sweetwater, Texas, units US FEET - lon, lat = tspc_lat_long(self, 1380811, 6858888) + lon, lat = tspc_lat_long(1380811, 6858888) self.assertAlmostEqual(lon, -100.40568335900281) self.assertAlmostEqual(lat, 32.468972700219375) From c5b72c3bbbbf0637bbb68324d9c2bd3dddc4d41f Mon Sep 17 00:00:00 2001 From: David Thomas Date: Mon, 24 Feb 2020 18:24:57 -0700 Subject: [PATCH 20/25] restructure and pep8 finished --- ph5/core/ph5api.py | 2 +- ph5/core/ph5utils.py | 92 +++++++++++------------- ph5/core/segyfactory.py | 12 ++-- ph5/utilities/novenqc.py | 2 +- ph5/utilities/segd2ph5.py | 8 +-- ph5/utilities/tests/test_utmtolatlong.py | 47 ++++++------ ph5/utilities/utm_latlong.py | 26 ++++--- 7 files changed, 96 insertions(+), 93 deletions(-) diff --git a/ph5/core/ph5api.py b/ph5/core/ph5api.py index dbb017cf..bfbfe0b3 100755 --- a/ph5/core/ph5api.py +++ b/ph5/core/ph5api.py @@ -14,7 +14,7 @@ from ph5.core import columns, experiment, timedoy, ph5utils from tables.exceptions import NoSuchNodeError -PROG_VERSION = '2020.017' +PROG_VERSION = '2020.055' LOGGER = logging.getLogger(__name__) PH5VERSION = columns.PH5VERSION diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 01853f12..3e03556c 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -8,6 +8,7 @@ # version: 2019.032 # author: Lan Dam # add inday_breakup() function +# Dave Thomas added UTM / Lat Long routines,consolidated all pyproj here import fnmatch from datetime import datetime, timedelta @@ -18,7 +19,7 @@ from pyproj import Transformer, Geod import math -PROG_VERSION = "2020.017" +PROG_VERSION = "2020.055" class PH5Response(object): @@ -70,60 +71,53 @@ def get_n_i(self, sensor_keys, datalogger_keys): return ph5_resp.n_i -class UTMConversions: - - def __init__(self, lat, lon, side, zone): - self.epsg_wgs84 = "EPSG:4326" - - if side is not None and lat is None: - if side == "S": - epsgroot = "327" - elif side == "N": - epsgroot = "326" - elif lat is not None: - if lat < 0.0: - epsgroot = "327" - side = 'S' - elif lat >= 0.0: - epsgroot = "326" - side = 'N' - - if zone is not None and lon is None: - self.epsg_utm = "EPSG:" + epsgroot + str(zone) - elif lon is not None: - zone = self.lon2zone(lon) - self.epsg_utm = "EPSG:" + epsgroot + str(zone) - - self.utm_zone = zone - self.hemisphere = side - - def lon2zone(self, lon): - ''' Get UTM zone from longitude ''' - zone = int(math.floor((float(lon) + 180.)/6.0) % 60) + 1 - return zone - - def utm2latlong(self, easting, northing): - transformer = Transformer.from_crs(self.epsg_utm, self.epsg_wgs84, - always_xy=True) - lon, lat = 
transformer.transform(float(easting), float(northing)) - return (lat, lon) +class LatLongToUtmConvert: - def geod2utm(self, lat, lon, elev): # utility function - transformer = Transformer.from_crs(self.epsg_wgs84, self.epsg_utm, - always_xy=True) - easting, northing = transformer.transform(lon, lat) - return (northing, easting, elev) # e.g. Y, X, Z + def __init__(self, lat, lon): + epsg_wgs84 = "EPSG:4326" + + if lat < 0.0: + epsgroot = "327" + self.hemisphere = 'S' + elif lat >= 0.0: + epsgroot = "326" + self.hemisphere = 'N' - def latlong2utm(self, lat, lon): - transformer = Transformer.from_crs(self.epsg_wgs84, self.epsg_utm, + self.utm_zone = int(math.floor((float(lon) + 180.)/6.0) % 60) + 1 + epsg_utm = "EPSG:" + epsgroot + str(self.utm_zone) + transformer = Transformer.from_crs(epsg_wgs84, + epsg_utm, always_xy=True) - easting, northing = transformer.transform(lon, lat) - return (easting, northing, self.utm_zone, self.hemisphere) + self.easting, self.northing = transformer.transform(lon, lat) + + def geod_to_utm(self, elev): + return (self.northing, self.easting, elev) + + def lat_long_to_utm(self): + return (self.easting, self.northing, self.utm_zone, self.hemisphere) + + +def utm_to_lat_long(easting, northing, hemisphere, zone): + + epsg_wgs84 = "EPSG:4326" + + if hemisphere == "S": + epsgroot = "327" + elif hemisphere == "N": + epsgroot = "326" + + epsg_utm = "EPSG:" + epsgroot + str(zone) + + transformer = Transformer.from_crs(epsg_utm, epsg_wgs84, always_xy=True) + lon, lat = transformer.transform(float(easting), float(northing)) + return (lat, lon) -def tspc_lat_long(easting, northing): # Texas State Plane Coords +def tspc_lat_long(easting, northing): + # Texas State Plane Coords + # State Plane Coords in US FEET epsg_wgs84 = "EPSG:4326" - epsg_sp4202 = "EPSG:2276" # State Plane Coords in US FEET + epsg_sp4202 = "EPSG:2276" transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, always_xy=True) lon, lat = transformer.transform(easting, northing) diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index f2a0aad7..038ea2e6 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -16,10 +16,10 @@ import string import sys import logging -from ph5.core import ph5utils # for geod2utm, run_geod +from ph5.core import ph5utils # for geod_to_utm, run_geod from ph5.core import segy_h, ebcdic -PROG_VERSION = '2020.015' +PROG_VERSION = '2020.055' LOGGER = logging.getLogger(__name__) os.environ['TZ'] = 'UTC' @@ -789,8 +789,8 @@ def set_trace_header(self): lat = self.array_t['location/Y/value_d'] lon = self.array_t['location/X/value_d'] elev = self.array_t['location/Z/value_d'] - utmc = ph5utils.UTMConversions(lat, lon, None, None) - Y, X, Z = utmc.geod2utm(lat, lon, elev) + utmc = ph5utils.LatLongToUtmConvert(lat, lon) + Y, X, Z = utmc.geod_to_utm(elev) s, vx, vy = pick_values_32(X, Y) tra['coordScale'] = s @@ -846,8 +846,8 @@ def set_trace_header(self): lat = self.event_t['location/Y/value_d'] lon = self.event_t['location/X/value_d'] elev = self.event_t['location/Z/value_d'] - utmc = ph5utils.UTMConversions(lat, lon, None, None) - Y, X, Z = utmc.geod2utm(lat, lon, elev) + utmc = ph5utils.LatLongToUtmConvert(lat, lon) + Y, X, Z = utmc.geod_to_utm(elev) s, vx, vy = pick_values_32(X, Y) tra['sourceLongOrX'] = vx diff --git a/ph5/utilities/novenqc.py b/ph5/utilities/novenqc.py index dea550ff..24c9c679 100644 --- a/ph5/utilities/novenqc.py +++ b/ph5/utilities/novenqc.py @@ -17,7 +17,7 @@ from ph5.core import timedoy -PROG_VERSION = '2020.015' +PROG_VERSION = 
'2020.055' LOGGER = logging.getLogger(__name__) FACTS = {'km': 1000., 'm': 1., 'dm': 1. / 10., 'cm': 1. / 100., diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index de39c9df..77b007d8 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -19,7 +19,7 @@ from ph5 import LOGGING_FORMAT -PROG_VERSION = "2020.017" +PROG_VERSION = "2020.055" LOGGER = logging.getLogger(__name__) @@ -1084,9 +1084,9 @@ def get_node(sd): 4].receiver_point_X_final / 10. northing = SD.trace_headers.trace_header_N[ 4].receiver_point_Y_final / 10. - utmc = ph5utils.UTMConversions(None, None, - NS, utmzone) - LAT, LON = utmc.utm2latlong(easting, northing) + LAT, LON = ph5utils.utm_to_lat_long( + easting, northing, + NS, utmzone) elif TSPF: # Texas State Plane coordinates LAT, LON = ph5utils.tspc_lat_long( diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index 95e747dd..85bb7a2b 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -1,33 +1,46 @@ import unittest import pyproj -from ph5.core.ph5utils import UTMConversions +from ph5.core.ph5utils import LatLongToUtmConvert +from ph5.core.ph5utils import utm_to_lat_long from ph5.core.ph5utils import tspc_lat_long class TestUTMconversion(unittest.TestCase): def test_is_valid_utm_conversion(self): - u = UTMConversions(None, None, 'N', 13) # PIC Socorro - lat, lon = u.utm2latlong(322916, 3771967) + # location: PIC + lat, lon = utm_to_lat_long(322916, 3771967, 'N', 13) self.assertAlmostEqual(lat, 34.07349577107704) self.assertAlmostEqual(lon, -106.91909595147378) def test_is_valid_utm_conversion_south(self): - u = UTMConversions(None, None, 'S', 58) # Castle Rock Antarctica - lat, lon = u.utm2latlong(540947, 1361594) + # location: Castle Rock, Antarctica + lat, lon = utm_to_lat_long(540947, 1361594, 'S', 58) self.assertAlmostEqual(lat, -77.81567398301094) self.assertAlmostEqual(lon, 166.73816638798527) - def test_is_valid_utm_conversion_inverse(self): - u = UTMConversions(34.0734958, -106.9190959, None, None) # PIC - northing, easting, elev = u.geod2utm(34.0734958, -106.9190959, 1456.0) + def test_for_correct_type(self): + # heck, no are invalid eastings/northings + with self.assertRaises(ValueError): + lat, lon = utm_to_lat_long('heck', 'no', 'S', 58) + + def test_for_correct_value(self): + # 666 is invalid UTM zone + with self.assertRaises(pyproj.exceptions.CRSError): + lat, lon = utm_to_lat_long(540947, 1361594, 'S', 666) + + def test_is_valid_geod_conversion(self): + # location: PIC + u = LatLongToUtmConvert(34.0734958, -106.9190959) + northing, easting, elev = u.geod_to_utm(1456.0) self.assertAlmostEqual(northing, 3771967.003118457) self.assertAlmostEqual(easting, 322916.0048106084) + self.assertAlmostEqual(elev, 1456.0) - def test_is_valid_utm_conversion_fancyinverse(self): - u = UTMConversions(34.0734958, -106.9190959, None, None) # PIC - easting, northing, zone, hemisphere = \ - u.latlong2utm(34.0734958, -106.9190959) + def test_is_valid_latlong_conversion(self): + # location: PIC + u = LatLongToUtmConvert(34.0734958, -106.9190959) + easting, northing, zone, hemisphere = u.lat_long_to_utm() self.assertAlmostEqual(easting, 322916.0048106084) self.assertAlmostEqual(northing, 3771967.003118457) self.assertEqual(zone, 13) @@ -39,16 +52,6 @@ def test_is_valid_tsp_conversion(self): self.assertAlmostEqual(lon, -100.40568335900281) self.assertAlmostEqual(lat, 32.468972700219375) - def test_for_correct_type(self): - with 
self.assertRaises(ValueError): - u = UTMConversions('heck', 'no', 'S', 58) # 'heck' is not a float - u.utm2latlong('heck', 'no') - - def test_for_correct_value(self): # 666 is invalid UTM zone - with self.assertRaises(pyproj.exceptions.CRSError): - u = UTMConversions(None, None, 'S', 666) - u.utm2latlong(540947, 1361594) - if __name__ == "__main__": unittest.main() diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py index c99f9229..70bebcee 100644 --- a/ph5/utilities/utm_latlong.py +++ b/ph5/utilities/utm_latlong.py @@ -1,12 +1,12 @@ # This script is used to convert UTM coordinates to/from Latitude/Longitude -# Dave Thomas, 2019-11-26 +# Dave Thomas, 2020-02-24 from __future__ import (print_function) from ph5.core import ph5utils import argparse import logging -PROG_VERSION = '2020.015' +PROG_VERSION = '2020.055' LOGGER = logging.getLogger(__name__) @@ -27,8 +27,8 @@ def convert_file_from_utm(infile, outfile): # batch file method zone = int(fieldset[2]) hemisphere = str(fieldset[3]).upper() try: - utm = ph5utils.UTMConversions(None, None, hemisphere, zone) - lat, lon = utm.utm2latlong(easting, northing) + lat, lon = ph5utils.utm_to_lat_long(easting, northing, + hemisphere, zone) ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" printstr1 = (ps % (easting, northing, zone, hemisphere)) printstr2 = "Lon = %.7f, Lat = %.7f" % (lon, lat) @@ -56,8 +56,8 @@ def convert_file_from_latlong(infile, outfile): # batch file method lat = float(fieldset[0]) lon = float(fieldset[1]) try: - utm = ph5utils.UTMConversions(lat, lon, None, None) - easting, northing, zone, hemisphere = utm.latlong2utm(lat, lon) + utm = ph5utils.LatLongToUtmConvert(lat, lon) + easting, northing, zone, hemisphere = utm.lat_long_to_utm() ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" printstr1 = "Lon = %.7f, Lat = %.7f" % (lon, lat) printstr2 = (ps % (easting, northing, zone, hemisphere)) @@ -74,11 +74,12 @@ def doinline_from_utm(easting, northing, zone, hemisphere): # interactive print ("easting=%s, northing=%s, zone=%s, hemisphere=%s" % (easting, northing, zone, hemisphere)) try: - utm = ph5utils.UTMConversions(None, None, hemisphere, zone) indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" % (float(easting), float(northing), int(zone), hemisphere)) print (indat) - lat, lon = utm.utm2latlong(float(easting), float(northing)) + + lat, lon = ph5utils.utm_to_lat_long(easting, northing, + hemisphere, zone) outstr = "Lon = %.7f, Lat = %.7f" % (lon, lat) print (outstr) print (''.join('*'*50)) @@ -89,10 +90,15 @@ def doinline_from_utm(easting, northing, zone, hemisphere): # interactive def doinline_from_latlong(lon, lat): # interactive try: - utm = ph5utils.UTMConversions(lat, lon, None, None) + if abs(float(lat)) > 90.0: + msg = "Geodetic Error, your latitude, %.7f, is out of limit." 
\ + % (float(lat)) + print (msg) + return + utm = ph5utils.LatLongToUtmConvert(lat, lon) indat = "Input: lon=%.7f, lat=%.7f" % (float(lon), float(lat)) print (indat) - easting, northing, zone, hemisphere = utm.latlong2utm(lat, lon) + easting, northing, zone, hemisphere = utm.lat_long_to_utm() outstr = "easting=%.1f, northing=%.1f, zone=%s, hemisphere=%s" \ % (float(easting), float(northing), zone, hemisphere) print (outstr) From 3e5cd6181bc2fbf9bf6dc5dd69feb1c13dd145ae Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 26 Feb 2020 10:42:34 -0700 Subject: [PATCH 21/25] removed pinning of pyproj in environment.yml --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index da647403..34212fff 100644 --- a/environment.yml +++ b/environment.yml @@ -11,7 +11,7 @@ dependencies: - nose - numpy - numexpr - - pyproj=2.2 + - pyproj - psutil - obspy - vispy From ade0cf9921bd4f497a8652bd5a3b15cbe31e110d Mon Sep 17 00:00:00 2001 From: David Thomas Date: Thu, 5 Mar 2020 12:18:32 -0700 Subject: [PATCH 22/25] eliminated UTM class, all functions now --- ph5/core/ph5utils.py | 50 ++++++++++++------------ ph5/core/segyfactory.py | 6 +-- ph5/utilities/tests/test_utmtolatlong.py | 26 ++++++++---- ph5/utilities/utm_latlong.py | 8 ++-- 4 files changed, 51 insertions(+), 39 deletions(-) diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 3e03556c..a06a2701 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -71,30 +71,32 @@ def get_n_i(self, sensor_keys, datalogger_keys): return ph5_resp.n_i -class LatLongToUtmConvert: - - def __init__(self, lat, lon): - epsg_wgs84 = "EPSG:4326" - - if lat < 0.0: - epsgroot = "327" - self.hemisphere = 'S' - elif lat >= 0.0: - epsgroot = "326" - self.hemisphere = 'N' - - self.utm_zone = int(math.floor((float(lon) + 180.)/6.0) % 60) + 1 - epsg_utm = "EPSG:" + epsgroot + str(self.utm_zone) - transformer = Transformer.from_crs(epsg_wgs84, - epsg_utm, - always_xy=True) - self.easting, self.northing = transformer.transform(lon, lat) - - def geod_to_utm(self, elev): - return (self.northing, self.easting, elev) - - def lat_long_to_utm(self): - return (self.easting, self.northing, self.utm_zone, self.hemisphere) +def lat_long_to_utm(lat, lon): + + epsg_wgs84 = "EPSG:4326" + + if lat < 0.0: + epsgroot = "327" + hemisphere = 'S' + elif lat >= 0.0: + epsgroot = "326" + hemisphere = 'N' + + utm_zone = int(math.floor((float(lon) + 180.)/6.0) % 60) + 1 + epsg_utm = "EPSG:" + epsgroot + str(utm_zone) + transformer = Transformer.from_crs(epsg_wgs84, + epsg_utm, + always_xy=True) + easting, northing = transformer.transform(lon, lat) + + return (easting, northing, utm_zone, hemisphere) + + +def geod_to_utm(lat, lon, elev): + + easting, northing, utm_zone, hemisphere = lat_long_to_utm(lat, lon) + + return (northing, easting, elev) def utm_to_lat_long(easting, northing, hemisphere, zone): diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index 038ea2e6..fa8ef68d 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -789,8 +789,7 @@ def set_trace_header(self): lat = self.array_t['location/Y/value_d'] lon = self.array_t['location/X/value_d'] elev = self.array_t['location/Z/value_d'] - utmc = ph5utils.LatLongToUtmConvert(lat, lon) - Y, X, Z = utmc.geod_to_utm(elev) + Y, X, Z = ph5utils.geod_to_utm(lat, lon, elev) s, vx, vy = pick_values_32(X, Y) tra['coordScale'] = s @@ -846,8 +845,7 @@ def set_trace_header(self): lat = self.event_t['location/Y/value_d'] lon = 
self.event_t['location/X/value_d'] elev = self.event_t['location/Z/value_d'] - utmc = ph5utils.LatLongToUtmConvert(lat, lon) - Y, X, Z = utmc.geod_to_utm(elev) + Y, X, Z = ph5utils.geod_to_utm(lat, lon, elev) s, vx, vy = pick_values_32(X, Y) tra['sourceLongOrX'] = vx diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index 85bb7a2b..d2294527 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -1,13 +1,14 @@ import unittest import pyproj -from ph5.core.ph5utils import LatLongToUtmConvert +from ph5.core.ph5utils import lat_long_to_utm from ph5.core.ph5utils import utm_to_lat_long +from ph5.core.ph5utils import geod_to_utm from ph5.core.ph5utils import tspc_lat_long class TestUTMconversion(unittest.TestCase): - def test_is_valid_utm_conversion(self): + def test_is_valid_utm_conversion_north(self): # location: PIC lat, lon = utm_to_lat_long(322916, 3771967, 'N', 13) self.assertAlmostEqual(lat, 34.07349577107704) @@ -31,21 +32,32 @@ def test_for_correct_value(self): def test_is_valid_geod_conversion(self): # location: PIC - u = LatLongToUtmConvert(34.0734958, -106.9190959) - northing, easting, elev = u.geod_to_utm(1456.0) + northing, easting, elev =\ + geod_to_utm(34.0734958, -106.9190959, 1456.0) self.assertAlmostEqual(northing, 3771967.003118457) self.assertAlmostEqual(easting, 322916.0048106084) self.assertAlmostEqual(elev, 1456.0) - def test_is_valid_latlong_conversion(self): + def test_is_valid_latlong_conversion_north(self): # location: PIC - u = LatLongToUtmConvert(34.0734958, -106.9190959) - easting, northing, zone, hemisphere = u.lat_long_to_utm() + easting, northing, zone, hemisphere =\ + lat_long_to_utm(34.0734958, -106.9190959) + self.assertAlmostEqual(easting, 322916.0048106084) self.assertAlmostEqual(northing, 3771967.003118457) self.assertEqual(zone, 13) self.assertAlmostEqual(hemisphere, 'N') + def test_is_valid_latlong_conversion_south(self): + # location: Castle Rock Antarctica + easting, northing, zone, hemisphere =\ + lat_long_to_utm(-77.81567398301094, 166.73816638798527) + + self.assertAlmostEqual(easting, 540947.0) + self.assertAlmostEqual(northing, 1361594.0) + self.assertEqual(zone, 58) + self.assertAlmostEqual(hemisphere, 'S') + def test_is_valid_tsp_conversion(self): # Sweetwater, Texas, units US FEET lon, lat = tspc_lat_long(1380811, 6858888) diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py index 70bebcee..9af66e92 100644 --- a/ph5/utilities/utm_latlong.py +++ b/ph5/utilities/utm_latlong.py @@ -56,8 +56,8 @@ def convert_file_from_latlong(infile, outfile): # batch file method lat = float(fieldset[0]) lon = float(fieldset[1]) try: - utm = ph5utils.LatLongToUtmConvert(lat, lon) - easting, northing, zone, hemisphere = utm.lat_long_to_utm() + easting, northing, zone, hemisphere =\ + ph5utils.lat_long_to_utm(lat, lon) ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" printstr1 = "Lon = %.7f, Lat = %.7f" % (lon, lat) printstr2 = (ps % (easting, northing, zone, hemisphere)) @@ -95,10 +95,10 @@ def doinline_from_latlong(lon, lat): # interactive % (float(lat)) print (msg) return - utm = ph5utils.LatLongToUtmConvert(lat, lon) + easting, northing, zone, hemisphere = \ + ph5utils.lat_long_to_utm(lat, lon) indat = "Input: lon=%.7f, lat=%.7f" % (float(lon), float(lat)) print (indat) - easting, northing, zone, hemisphere = utm.lat_long_to_utm() outstr = "easting=%.1f, northing=%.1f, zone=%s, hemisphere=%s" \ % (float(easting), float(northing), zone, 
hemisphere) print (outstr) From 98b5ff004ad9aec79f9af6741f41543005fbcc8d Mon Sep 17 00:00:00 2001 From: David Thomas Date: Fri, 6 Mar 2020 11:17:05 -0700 Subject: [PATCH 23/25] updates per Lloyd's review --- ph5/core/ph5api.py | 6 +++--- ph5/core/ph5utils.py | 9 ++++---- ph5/core/segyfactory.py | 8 ++++---- ph5/utilities/novenqc.py | 9 ++++---- ph5/utilities/segd2ph5.py | 2 +- ph5/utilities/tests/test_utmtolatlong.py | 12 +++++++++++ ph5/utilities/utm_latlong.py | 26 +++++++++++++++--------- 7 files changed, 45 insertions(+), 27 deletions(-) diff --git a/ph5/core/ph5api.py b/ph5/core/ph5api.py index bfbfe0b3..703a2f4f 100755 --- a/ph5/core/ph5api.py +++ b/ph5/core/ph5api.py @@ -14,7 +14,7 @@ from ph5.core import columns, experiment, timedoy, ph5utils from tables.exceptions import NoSuchNodeError -PROG_VERSION = '2020.055' +PROG_VERSION = '2020.066' LOGGER = logging.getLogger(__name__) PH5VERSION = columns.PH5VERSION @@ -336,8 +336,8 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): lat0 = array_t[0]['location/Y/value_d'] lon1 = event_t['location/X/value_d'] lat1 = event_t['location/Y/value_d'] - az, baz, dist = ph5utils.run_geod(lat0, lon0, lat1, lon1, - FACTS_M[UNITS]) + az, baz, dist = ph5utils.latlon2geod(lat0, lon0, lat1, lon1, + FACTS_M[UNITS]) except Exception as e: LOGGER.warning("Couldn't get offset. {0}".format(repr(e))) return {} diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index a06a2701..6cfb88f3 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -19,7 +19,7 @@ from pyproj import Transformer, Geod import math -PROG_VERSION = "2020.055" +PROG_VERSION = "2020.066" class PH5Response(object): @@ -116,8 +116,7 @@ def utm_to_lat_long(easting, northing, hemisphere, zone): def tspc_lat_long(easting, northing): - # Texas State Plane Coords - # State Plane Coords in US FEET + # Texas State Plane Coords, in US FEET epsg_wgs84 = "EPSG:4326" epsg_sp4202 = "EPSG:2276" transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, @@ -126,7 +125,8 @@ def tspc_lat_long(easting, northing): return (lon, lat) -def run_geod(lat0, lon0, lat1, lon1, scalar=1.0): +def latlon2geod(lat0, lon0, lat1, lon1, scalar=1.0): + # Return azimuth, back azimuth, geodetic distance ELLIPSOID = 'WGS84' config = "+ellps={0}".format(ELLIPSOID) @@ -138,7 +138,6 @@ def run_geod(lat0, lon0, lat1, lon1, scalar=1.0): if dist: dist /= scalar - # Return list containing azimuth, back azimuth, distance return az, baz, dist diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index fa8ef68d..f8468525 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -16,10 +16,10 @@ import string import sys import logging -from ph5.core import ph5utils # for geod_to_utm, run_geod +from ph5.core import ph5utils from ph5.core import segy_h, ebcdic -PROG_VERSION = '2020.055' +PROG_VERSION = '2020.066' LOGGER = logging.getLogger(__name__) os.environ['TZ'] = 'UTC' @@ -876,8 +876,8 @@ def set_trace_header(self): lon2 = self.array_t['location/X/value_d'] UNITS = 'm' - az_baz_dist = ph5utils.run_geod(lat1, lon1, lat2, lon2, - FACTS[UNITS]) + az_baz_dist = ph5utils.latlon2geod(lat1, lon1, lat2, lon2, + FACTS[UNITS]) tra['sourceToRecDist'] = az_baz_dist[2] except Exception as e: # sys.stderr.write (e.message) diff --git a/ph5/utilities/novenqc.py b/ph5/utilities/novenqc.py index 24c9c679..8eb2a20a 100644 --- a/ph5/utilities/novenqc.py +++ b/ph5/utilities/novenqc.py @@ -12,12 +12,13 @@ from random import randint from inspect import stack from ast import literal_eval -from ph5.core import 
ph5utils # for run_geod +from ph5.core import ph5utils import simplekml as kml from ph5.core import timedoy -PROG_VERSION = '2020.055' +PROG_VERSION = '2020.066' + LOGGER = logging.getLogger(__name__) FACTS = {'km': 1000., 'm': 1., 'dm': 1. / 10., 'cm': 1. / 100., @@ -216,8 +217,8 @@ def qc_dist(ys, xs, zs): # Need to check for UTM and convert to lat/lon # units = 'm' - az, baz, dist = ph5utils.run_geod(ys[0], xs[0], ys[1], xs[1], - FACTS[units]) + az, baz, dist = ph5utils.latlon2geod(ys[0], xs[0], ys[1], xs[1], + FACTS[units]) if len(zs) > 1: zdelta = float(zs[1]) - float(zs[0]) else: diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index 77b007d8..e182105d 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -19,7 +19,7 @@ from ph5 import LOGGING_FORMAT -PROG_VERSION = "2020.055" +PROG_VERSION = "2020.066" LOGGER = logging.getLogger(__name__) diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index d2294527..535c637b 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -4,6 +4,9 @@ from ph5.core.ph5utils import utm_to_lat_long from ph5.core.ph5utils import geod_to_utm from ph5.core.ph5utils import tspc_lat_long +from ph5.core.ph5utils import latlon2geod + +PROG_VERSION = '2020.066' class TestUTMconversion(unittest.TestCase): @@ -64,6 +67,15 @@ def test_is_valid_tsp_conversion(self): self.assertAlmostEqual(lon, -100.40568335900281) self.assertAlmostEqual(lat, 32.468972700219375) + def test_is_valid_latlon2geod_conversion(self): + # socorro to albuquerque in miles + az, baz, dist = latlon2geod(34.0543, -106.907, + 35.1053, -106.646, + 1609.344) + self.assertAlmostEqual(az, 11.533074930503515) + self.assertAlmostEqual(baz, -168.3187871800244) + self.assertAlmostEqual(dist, 73.95826059337135) + if __name__ == "__main__": unittest.main() diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py index 9af66e92..311ba10c 100644 --- a/ph5/utilities/utm_latlong.py +++ b/ph5/utilities/utm_latlong.py @@ -6,11 +6,12 @@ import argparse import logging -PROG_VERSION = '2020.055' +PROG_VERSION = '2020.066' LOGGER = logging.getLogger(__name__) -def convert_file_from_utm(infile, outfile): # batch file method +def convert_file_from_utm(infile, outfile): + # batch file method counter = 0 with open(outfile, "w") as theoutfile, open(infile) as theinfile: ws = "Easting, Northing, Zone, Hemisphere, => Longitude, Latitude\n" @@ -18,7 +19,8 @@ def convert_file_from_utm(infile, outfile): # batch file method for line in theinfile: counter = counter + 1 if counter == 1: - continue # skip header line + # skip header line + continue fieldset = line.strip().split(",") if len(fieldset) <= 1: continue @@ -41,7 +43,8 @@ def convert_file_from_utm(infile, outfile): # batch file method return -def convert_file_from_latlong(infile, outfile): # batch file method +def convert_file_from_latlong(infile, outfile): + # batch file method counter = 0 with open(outfile, "w") as theoutfile, open(infile) as theinfile: ws = "Longitude, Latitude, =>Easting, Northing, Zone, Hemisphere\n" @@ -49,7 +52,8 @@ def convert_file_from_latlong(infile, outfile): # batch file method for line in theinfile: counter = counter + 1 if counter == 1: - continue # skip header line + # skip header line + continue fieldset = line.strip().split(",") if len(fieldset) <= 1: continue @@ -70,9 +74,8 @@ def convert_file_from_latlong(infile, outfile): # batch file method return -def doinline_from_utm(easting, northing, 
zone, hemisphere): # interactive - print ("easting=%s, northing=%s, zone=%s, hemisphere=%s" - % (easting, northing, zone, hemisphere)) +def doinline_from_utm(easting, northing, zone, hemisphere): + # interactive method try: indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" % (float(easting), float(northing), int(zone), hemisphere)) @@ -88,7 +91,8 @@ def doinline_from_utm(easting, northing, zone, hemisphere): # interactive return -def doinline_from_latlong(lon, lat): # interactive +def doinline_from_latlong(lon, lat): + # interactive method try: if abs(float(lat)) > 90.0: msg = "Geodetic Error, your latitude, %.7f, is out of limit." \ @@ -130,6 +134,7 @@ def main(): args = parser.parse_args() + # for batch file method if args.infile is not None and args.outfile is not None: if args.utm == 1: convert_file_from_utm(args.infile, args.outfile) @@ -138,7 +143,8 @@ def main(): else: print ("Error, you must specify either -u/--utm, OR -l/--latlong") - else: # direct method + # for direct method + else: if args.utm == 1: if args.easting is not None and args.northing is not None \ and args.zone is not None and args.side is not None: From 3e62af5a18306e8f36a8e5847a9ce1242b4c53f1 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Fri, 6 Mar 2020 11:51:43 -0700 Subject: [PATCH 24/25] fixed lowercase data entry issue --- ph5/utilities/utm_latlong.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py index 311ba10c..06dd2b8b 100644 --- a/ph5/utilities/utm_latlong.py +++ b/ph5/utilities/utm_latlong.py @@ -149,7 +149,7 @@ def main(): if args.easting is not None and args.northing is not None \ and args.zone is not None and args.side is not None: doinline_from_utm(args.easting, args.northing, - args.zone, args.side) + args.zone, args.side.upper()) else: print ("Error-you must specify easting, northing, zone, side") elif args.latlong == 1: From 91452e9ab5264ebc15ce45020cc93d7fc3ea28c8 Mon Sep 17 00:00:00 2001 From: Dave Thomas Date: Tue, 31 Mar 2020 14:27:07 -0600 Subject: [PATCH 25/25] Added UPS, logger usage, name cleanups --- ph5/core/ph5api.py | 8 +- ph5/core/ph5utils.py | 91 +++++++---- ph5/core/segyfactory.py | 10 +- ph5/entry_points.py | 7 +- ph5/utilities/novenqc.py | 7 +- ph5/utilities/segd2ph5.py | 12 +- ph5/utilities/tests/test_utmtolatlong.py | 77 ++++++---- ph5/utilities/ups_latlong.py | 184 +++++++++++++++++++++++ ph5/utilities/utm_latlong.py | 88 +++++------ 9 files changed, 363 insertions(+), 121 deletions(-) create mode 100644 ph5/utilities/ups_latlong.py diff --git a/ph5/core/ph5api.py b/ph5/core/ph5api.py index 703a2f4f..0f7d2c61 100755 --- a/ph5/core/ph5api.py +++ b/ph5/core/ph5api.py @@ -14,7 +14,7 @@ from ph5.core import columns, experiment, timedoy, ph5utils from tables.exceptions import NoSuchNodeError -PROG_VERSION = '2020.066' +PROG_VERSION = '2020.091' LOGGER = logging.getLogger(__name__) PH5VERSION = columns.PH5VERSION @@ -336,12 +336,12 @@ def get_offset(self, sta_line, sta_id, evt_line, evt_id): lat0 = array_t[0]['location/Y/value_d'] lon1 = event_t['location/X/value_d'] lat1 = event_t['location/Y/value_d'] - az, baz, dist = ph5utils.latlon2geod(lat0, lon0, lat1, lon1, - FACTS_M[UNITS]) + az, baz, dist = ph5utils.lat_lon_to_geod(lat0, lon0, + lat1, lon1, + FACTS_M[UNITS]) except Exception as e: LOGGER.warning("Couldn't get offset. 
{0}".format(repr(e))) return {} - return {'event_id_s': evt_id, 'receiver_id_s': sta_id, 'azimuth/value_f': az, 'azimuth/units_s': 'degrees', 'offset/value_d': dist, 'offset/units_s': 'm'} diff --git a/ph5/core/ph5utils.py b/ph5/core/ph5utils.py index 6cfb88f3..93e9247e 100644 --- a/ph5/core/ph5utils.py +++ b/ph5/core/ph5utils.py @@ -19,7 +19,7 @@ from pyproj import Transformer, Geod import math -PROG_VERSION = "2020.066" +PROG_VERSION = "2020.091" class PH5Response(object): @@ -71,76 +71,105 @@ def get_n_i(self, sensor_keys, datalogger_keys): return ph5_resp.n_i -def lat_long_to_utm(lat, lon): - +def lat_lon_to_utm(lat, lon): epsg_wgs84 = "EPSG:4326" - if lat < 0.0: epsgroot = "327" hemisphere = 'S' elif lat >= 0.0: epsgroot = "326" hemisphere = 'N' - utm_zone = int(math.floor((float(lon) + 180.)/6.0) % 60) + 1 epsg_utm = "EPSG:" + epsgroot + str(utm_zone) transformer = Transformer.from_crs(epsg_wgs84, epsg_utm, - always_xy=True) - easting, northing = transformer.transform(lon, lat) - - return (easting, northing, utm_zone, hemisphere) + always_xy=False) + easting, northing = transformer.transform(lat, lon) + return (northing, easting, utm_zone, hemisphere) -def geod_to_utm(lat, lon, elev): - - easting, northing, utm_zone, hemisphere = lat_long_to_utm(lat, lon) - +def lat_lon_elev_to_utm(lat, lon, elev): + northing, easting, utm_zone, hemisphere = lat_lon_to_utm(lat, lon) return (northing, easting, elev) -def utm_to_lat_long(easting, northing, hemisphere, zone): - +def utm_to_lat_lon(northing, easting, hemisphere, zone): epsg_wgs84 = "EPSG:4326" - if hemisphere == "S": epsgroot = "327" elif hemisphere == "N": epsgroot = "326" - epsg_utm = "EPSG:" + epsgroot + str(zone) - - transformer = Transformer.from_crs(epsg_utm, epsg_wgs84, always_xy=True) - lon, lat = transformer.transform(float(easting), float(northing)) + transformer = Transformer.from_crs(epsg_utm, epsg_wgs84, always_xy=False) + lat, lon = transformer.transform(float(easting), float(northing)) return (lat, lon) -def tspc_lat_long(easting, northing): +def tspc_lat_lon(northing, easting): # Texas State Plane Coords, in US FEET epsg_wgs84 = "EPSG:4326" epsg_sp4202 = "EPSG:2276" transformer = Transformer.from_crs(epsg_sp4202, epsg_wgs84, - always_xy=True) - lon, lat = transformer.transform(easting, northing) - return (lon, lat) + always_xy=False) + lat, lon = transformer.transform(easting, northing) + return (lat, lon) -def latlon2geod(lat0, lon0, lat1, lon1, scalar=1.0): +def lat_lon_to_geod(lat0, lon0, lat1, lon1, scalar=1.0): # Return azimuth, back azimuth, geodetic distance ELLIPSOID = 'WGS84' - config = "+ellps={0}".format(ELLIPSOID) - g = Geod(config) - az, baz, dist = g.inv(lon0, lat0, lon1, lat1) - - if dist: + if dist > 0.0: dist /= scalar - return az, baz, dist +def lat_lon_to_ups_south(lat, lon): + # Universal Polar Stereographic, southern hemisphere + epsg_wgs84 = "EPSG:4326" + epsg_ups_south = "EPSG:32761" + transformer = Transformer.from_crs(epsg_wgs84, + epsg_ups_south, + always_xy=False) + northing, easting = transformer.transform(lat, lon) + return (northing, easting) + + +def lat_lon_to_ups_north(lat, lon): + # Universal Polar Stereographic, northern hemisphere + epsg_wgs84 = "EPSG:4326" + epsg_ups_north = "EPSG:32661" + transformer = Transformer.from_crs(epsg_wgs84, + epsg_ups_north, + always_xy=False) + northing, easting = transformer.transform(lat, lon) + return (northing, easting) + + +def ups_south_to_lat_lon(northing, easting): + # Universal Polar Stereogaphic, southern hemisphere + epsg_wgs84 = "EPSG:4326" + 
epsg_ups_south = "EPSG:32761" + transformer = Transformer.from_crs(epsg_ups_south, + epsg_wgs84, + always_xy=False) + lat, lon = transformer.transform(northing, easting) + return (lat, lon) + + +def ups_north_to_lat_lon(northing, easting): + # Universal Polar Stereogaphic, northern hemisphere + epsg_wgs84 = "EPSG:4326" + epsg_ups_north = "EPSG:32661" + transformer = Transformer.from_crs(epsg_ups_north, + epsg_wgs84, + always_xy=False) + lat, lon = transformer.transform(northing, easting) + return (lat, lon) + + """ =============== Utility methods diff --git a/ph5/core/segyfactory.py b/ph5/core/segyfactory.py index f8468525..e51dfb52 100644 --- a/ph5/core/segyfactory.py +++ b/ph5/core/segyfactory.py @@ -19,7 +19,7 @@ from ph5.core import ph5utils from ph5.core import segy_h, ebcdic -PROG_VERSION = '2020.066' +PROG_VERSION = '2020.091' LOGGER = logging.getLogger(__name__) os.environ['TZ'] = 'UTC' @@ -789,7 +789,7 @@ def set_trace_header(self): lat = self.array_t['location/Y/value_d'] lon = self.array_t['location/X/value_d'] elev = self.array_t['location/Z/value_d'] - Y, X, Z = ph5utils.geod_to_utm(lat, lon, elev) + Y, X, Z = ph5utils.lat_lon_elev_to_utm(lat, lon, elev) s, vx, vy = pick_values_32(X, Y) tra['coordScale'] = s @@ -845,7 +845,7 @@ def set_trace_header(self): lat = self.event_t['location/Y/value_d'] lon = self.event_t['location/X/value_d'] elev = self.event_t['location/Z/value_d'] - Y, X, Z = ph5utils.geod_to_utm(lat, lon, elev) + Y, X, Z = ph5utils.lat_lon_elev_to_utm(lat, lon, elev) s, vx, vy = pick_values_32(X, Y) tra['sourceLongOrX'] = vx @@ -876,8 +876,8 @@ def set_trace_header(self): lon2 = self.array_t['location/X/value_d'] UNITS = 'm' - az_baz_dist = ph5utils.latlon2geod(lat1, lon1, lat2, lon2, - FACTS[UNITS]) + az_baz_dist = ph5utils.lat_lon_to_geod(lat1, lon1, lat2, lon2, + FACTS[UNITS]) tra['sourceToRecDist'] = az_baz_dist[2] except Exception as e: # sys.stderr.write (e.message) diff --git a/ph5/entry_points.py b/ph5/entry_points.py index 07048562..80b539e6 100644 --- a/ph5/entry_points.py +++ b/ph5/entry_points.py @@ -5,7 +5,7 @@ # Dave Thomas, 2019-08-06 -PROG_VERSION = '2019.330' +PROG_VERSION = '2020.091' class EntryPointTypes(): @@ -282,5 +282,10 @@ def __init__(self): 'Converts UTM coordinates to/from' ' Latitudes & Longitudes.', type=EntryPointTypes.INGESTION), + EntryPoint('ups_latlong', + 'ph5.utilities.ups_latlong:main', + 'Converts UPS coordinates to/from' + ' Latitudes & Longitudes.', + type=EntryPointTypes.INGESTION), ] } diff --git a/ph5/utilities/novenqc.py b/ph5/utilities/novenqc.py index 8eb2a20a..658f71ed 100644 --- a/ph5/utilities/novenqc.py +++ b/ph5/utilities/novenqc.py @@ -17,7 +17,7 @@ from ph5.core import timedoy -PROG_VERSION = '2020.066' +PROG_VERSION = '2020.091' LOGGER = logging.getLogger(__name__) @@ -217,8 +217,9 @@ def qc_dist(ys, xs, zs): # Need to check for UTM and convert to lat/lon # units = 'm' - az, baz, dist = ph5utils.latlon2geod(ys[0], xs[0], ys[1], xs[1], - FACTS[units]) + az, baz, dist = ph5utils.lat_lon_to_geod(ys[0], xs[0], + ys[1], xs[1], + FACTS[units]) if len(zs) > 1: zdelta = float(zs[1]) - float(zs[0]) else: diff --git a/ph5/utilities/segd2ph5.py b/ph5/utilities/segd2ph5.py index e182105d..42df5278 100644 --- a/ph5/utilities/segd2ph5.py +++ b/ph5/utilities/segd2ph5.py @@ -19,7 +19,7 @@ from ph5 import LOGGING_FORMAT -PROG_VERSION = "2020.066" +PROG_VERSION = "2020.085" LOGGER = logging.getLogger(__name__) @@ -1084,16 +1084,16 @@ def get_node(sd): 4].receiver_point_X_final / 10. 
northing = SD.trace_headers.trace_header_N[ 4].receiver_point_Y_final / 10. - LAT, LON = ph5utils.utm_to_lat_long( - easting, northing, + LAT, LON = ph5utils.utm_to_lat_lon( + northing, easting, NS, utmzone) elif TSPF: # Texas State Plane coordinates - LAT, LON = ph5utils.tspc_lat_long( + LAT, LON = ph5utils.tspc_lat_lon( SD.trace_headers.trace_header_N[ - 4].receiver_point_X_final / 10., + 4].receiver_point_Y_final / 10., SD.trace_headers.trace_header_N[ - 4].receiver_point_Y_final / 10.) + 4].receiver_point_X_final / 10.) else: LAT = SD.trace_headers.trace_header_N[ 4].receiver_point_Y_final / 10. diff --git a/ph5/utilities/tests/test_utmtolatlong.py b/ph5/utilities/tests/test_utmtolatlong.py index 535c637b..6471f0a2 100644 --- a/ph5/utilities/tests/test_utmtolatlong.py +++ b/ph5/utilities/tests/test_utmtolatlong.py @@ -1,81 +1,104 @@ import unittest import pyproj -from ph5.core.ph5utils import lat_long_to_utm -from ph5.core.ph5utils import utm_to_lat_long -from ph5.core.ph5utils import geod_to_utm -from ph5.core.ph5utils import tspc_lat_long -from ph5.core.ph5utils import latlon2geod +from ph5.core.ph5utils import lat_lon_to_utm +from ph5.core.ph5utils import utm_to_lat_lon +from ph5.core.ph5utils import lat_lon_elev_to_utm +from ph5.core.ph5utils import tspc_lat_lon +from ph5.core.ph5utils import lat_lon_to_geod +from ph5.core.ph5utils import lat_lon_to_ups_north +from ph5.core.ph5utils import lat_lon_to_ups_south +from ph5.core.ph5utils import ups_north_to_lat_lon +from ph5.core.ph5utils import ups_south_to_lat_lon -PROG_VERSION = '2020.066' + +PROG_VERSION = '2020.091' class TestUTMconversion(unittest.TestCase): def test_is_valid_utm_conversion_north(self): # location: PIC - lat, lon = utm_to_lat_long(322916, 3771967, 'N', 13) + lat, lon = utm_to_lat_lon(3771967, 322916, 'N', 13) self.assertAlmostEqual(lat, 34.07349577107704) self.assertAlmostEqual(lon, -106.91909595147378) def test_is_valid_utm_conversion_south(self): # location: Castle Rock, Antarctica - lat, lon = utm_to_lat_long(540947, 1361594, 'S', 58) + lat, lon = utm_to_lat_lon(1361594, 540947, 'S', 58) self.assertAlmostEqual(lat, -77.81567398301094) self.assertAlmostEqual(lon, 166.73816638798527) def test_for_correct_type(self): # heck, no are invalid eastings/northings with self.assertRaises(ValueError): - lat, lon = utm_to_lat_long('heck', 'no', 'S', 58) + lat, lon = utm_to_lat_lon('heck', 'no', 'S', 58) def test_for_correct_value(self): # 666 is invalid UTM zone with self.assertRaises(pyproj.exceptions.CRSError): - lat, lon = utm_to_lat_long(540947, 1361594, 'S', 666) + lat, lon = utm_to_lat_lon(1361594, 540947, 'S', 666) def test_is_valid_geod_conversion(self): # location: PIC northing, easting, elev =\ - geod_to_utm(34.0734958, -106.9190959, 1456.0) + lat_lon_elev_to_utm(34.0734958, -106.9190959, 1456.0) self.assertAlmostEqual(northing, 3771967.003118457) self.assertAlmostEqual(easting, 322916.0048106084) self.assertAlmostEqual(elev, 1456.0) - def test_is_valid_latlong_conversion_north(self): + def test_is_valid_utm_latlong_conversion_north(self): # location: PIC - easting, northing, zone, hemisphere =\ - lat_long_to_utm(34.0734958, -106.9190959) - - self.assertAlmostEqual(easting, 322916.0048106084) + northing, easting, zone, hemisphere =\ + lat_lon_to_utm(34.0734958, -106.9190959) self.assertAlmostEqual(northing, 3771967.003118457) + self.assertAlmostEqual(easting, 322916.0048106084) self.assertEqual(zone, 13) self.assertAlmostEqual(hemisphere, 'N') - def test_is_valid_latlong_conversion_south(self): + def 
test_is_valid_utm_latlong_conversion_south(self): # location: Castle Rock Antarctica - easting, northing, zone, hemisphere =\ - lat_long_to_utm(-77.81567398301094, 166.73816638798527) - - self.assertAlmostEqual(easting, 540947.0) + northing, easting, zone, hemisphere =\ + lat_lon_to_utm(-77.81567398301094, 166.73816638798527) self.assertAlmostEqual(northing, 1361594.0) + self.assertAlmostEqual(easting, 540947.0) self.assertEqual(zone, 58) self.assertAlmostEqual(hemisphere, 'S') def test_is_valid_tsp_conversion(self): # Sweetwater, Texas, units US FEET - lon, lat = tspc_lat_long(1380811, 6858888) - self.assertAlmostEqual(lon, -100.40568335900281) + lat, lon = tspc_lat_lon(6858888, 1380811) self.assertAlmostEqual(lat, 32.468972700219375) + self.assertAlmostEqual(lon, -100.40568335900281) - def test_is_valid_latlon2geod_conversion(self): + def test_is_valid_lat_lon_to_geod_conversion(self): # socorro to albuquerque in miles - az, baz, dist = latlon2geod(34.0543, -106.907, - 35.1053, -106.646, - 1609.344) + az, baz, dist = lat_lon_to_geod(34.0543, -106.907, + 35.1053, -106.646, + 1609.344) self.assertAlmostEqual(az, 11.533074930503515) self.assertAlmostEqual(baz, -168.3187871800244) self.assertAlmostEqual(dist, 73.95826059337135) + def test_is_valid_ups_conversion_north(self): + lat, lon = ups_north_to_lat_lon(1500000.0, 2500000.0) + self.assertAlmostEqual(lat, 83.63731756105707) + self.assertAlmostEqual(lon, 45.0) + + def test_is_valid_ups_conversion_south(self): + lat, lon = ups_south_to_lat_lon(1500000.0, 2500000.0) + self.assertAlmostEqual(lat, -83.63731756105707) + self.assertAlmostEqual(lon, 135.0) + + def test_is_valid_ups_latlong_conversion_north(self): + northing, easting = lat_lon_to_ups_north(83.63731756105707, 135.) + self.assertAlmostEqual(northing, 2500000.0) + self.assertAlmostEqual(easting, 2500000.0) + + def test_is_valid_ups_latlong_conversion_south(self): + northing, easting = lat_lon_to_ups_south(-83.63731756105707, 45.) 
+ self.assertAlmostEqual(northing, 2500000.0) + self.assertAlmostEqual(easting, 2500000.0) + if __name__ == "__main__": unittest.main() diff --git a/ph5/utilities/ups_latlong.py b/ph5/utilities/ups_latlong.py new file mode 100644 index 00000000..e2adbaf4 --- /dev/null +++ b/ph5/utilities/ups_latlong.py @@ -0,0 +1,184 @@ +# This script is used to convert UPS coordinates to/from Latitude/Longitude +# Dave Thomas, 2020-03-27 + +from __future__ import (print_function) +from ph5.core import ph5utils +import argparse +import logging + +PROG_VERSION = '2020.091' +LOGGER = logging.getLogger(__name__) + + +def convert_file_from_ups(infile, outfile): + # batch file method + counter = 0 + with open(outfile, "w") as theoutfile, open(infile) as theinfile: + ws = "Hemisphere, Northing, Easting, => Latitude, Longitude\n" + theoutfile.write(ws) + for line in theinfile: + counter = counter + 1 + if counter == 1: + # skip header line + continue + fieldset = line.strip().split(",") + if len(fieldset) <= 1: + continue + hemisphere = str(fieldset[0]).upper() + northing = float(fieldset[1]) + easting = float(fieldset[2]) + try: + if hemisphere == 'N': + lat, lon = ph5utils.ups_north_to_lat_lon(northing, easting) + else: + # default to southern hemisphere + lat, lon = ph5utils.ups_south_to_lat_lon(northing, easting) + ps = "hemisphere=%s, northing=%.1f, easting=%.1f" + printstr1 = (ps % (hemisphere, northing, easting)) + printstr2 = "Lat = %.7f, Lon = %.7f" % (lat, lon) + LOGGER.info(printstr1 + " => " + printstr2) + outstr = "%s, %.1f, %.1f, %.7f, %.7f" \ + % (hemisphere, northing, easting, lat, lon) + theoutfile.write(outstr + "\n") + except ValueError: + LOGGER.error("There was an error in ups data conversion.") + return + + +def convert_file_from_latlong(infile, outfile): + # batch file method + counter = 0 + with open(outfile, "w") as theoutfile, open(infile) as theinfile: + ws = "Latitude, Longitude, => Hemisphere, Northing, Easting\n" + theoutfile.write(ws) + for line in theinfile: + counter = counter + 1 + if counter == 1: + # skip header line + continue + fieldset = line.strip().split(",") + if len(fieldset) <= 1: + continue + lat = float(fieldset[0]) + lon = float(fieldset[1]) + try: + if lat > 0.0: + hemisphere = 'N' + northing, easting =\ + ph5utils.lat_lon_to_ups_north(lat, lon) + else: + hemisphere = 'S' + northing, easting =\ + ph5utils.lat_lon_to_ups_south(lat, lon) + ps = "hemisphere=%s, northing=%.1f, easting=%.1f" + printstr1 = "Lat = %.7f, Lon = %.7f" % (lat, lon) + printstr2 = (ps % (hemisphere, northing, easting)) + LOGGER.info(printstr1 + " => " + printstr2) + outstr = "%.7f, %.7f, %s, %.1f, %.1f" \ + % (lat, lon, hemisphere, northing, easting) + theoutfile.write(outstr + "\n") + except ValueError: + LOGGER.error("There was an error in ups data conversion.") + return + + +def doinline_from_ups(easting, northing, hemisphere='S'): + # interactive method + try: + indat = ("Input: easting=%.1f, northing=%.1f, hemisphere=%s" + % (float(easting), float(northing), hemisphere)) + LOGGER.info(indat) + + if hemisphere == 'N': + lat, lon = ph5utils.ups_north_to_lat_lon(northing, easting) + else: + lat, lon = ph5utils.ups_south_to_lat_lon(northing, easting) + outstr = "Lat = %.7f, Lon = %.7f" % (lat, lon) + LOGGER.info(outstr) + LOGGER.info(''.join('*'*50)) + except ValueError: + LOGGER.error("There was an error in ups data conversion.") + return + + +def doinline_from_latlong(lat, lon): + # interactive method + try: + if abs(float(lat)) > 90.0: + LOGGER.error( + "Your latitude, {0}, is out of limit." 
+ .format(lat)) + return + if float(lat) > 0.0: + hemisphere = 'Northern' + northing, easting = \ + ph5utils.lat_lon_to_ups_north(float(lat), float(lon)) + else: + hemisphere = 'Southern' + northing, easting = \ + ph5utils.lat_lon_to_ups_south(float(lat), float(lon)) + indat = "Input: lat=%.7f, lon=%.7f" % (float(lat), float(lon)) + LOGGER.info(indat) + outstr = "%s hemisphere, northing =%.1f, easting =%.1f" \ + % (hemisphere, float(northing), float(easting)) + LOGGER.info(outstr) + LOGGER.info(''.join('*'*50)) + except ValueError: + LOGGER.error("There was an error in ups data conversion.") + return + + +def main(): + desc = 'Convert UPS locations to/from longitudes, latitudes, \ +in batches (-i, -o) or inline (-e,-n,-s), (-x,-y)' + parser = argparse.ArgumentParser(description=desc) + group1 = parser.add_mutually_exclusive_group() + group1.add_argument("-u", "--ups", help="Convert UPS to Lat/Long", + action="store_true") + group1.add_argument("-l", "--latlong", help="Convert Lat/Long to UPS", + action="store_true") + parser.add_argument("-i", "--infile", help="Input file with UPS \ +Easting, Northing, Hemisphere, or Lats and Longs") + parser.add_argument("-o", "--outfile", help="Output file with \ +converted longitudes, latitudes or ups Coordinates") + parser.add_argument("-e", "--easting", help="Enter UPS Easting") + parser.add_argument("-n", "--northing", help="Enter UPS Northing") + parser.add_argument("-s", "--side", help="Side=Hemisphere, N or S") + parser.add_argument("-x", "--longitude", help="Enter longitude") + parser.add_argument("-y", "--latitude", help="Enter latitude") + + args = parser.parse_args() + + # for batch file method + if args.infile is not None and args.outfile is not None: + if args.ups == 1: + convert_file_from_ups(args.infile, args.outfile) + elif args.latlong == 1: + convert_file_from_latlong(args.infile, args.outfile) + else: + LOGGER.error("You must specify either -u/--ups, OR -l/--latlong") + + # for direct method + else: + if args.ups == 1: + if args.easting is not None and args.northing is not None: + if args.side is not None: + doinline_from_ups(args.easting, args.northing, + args.side.upper()) + else: + # default = Southern Hemisphere + doinline_from_ups(args.easting, args.northing, + 'S') + else: + LOGGER.error("You must specify northing and easting") + elif args.latlong == 1: + if args.longitude is not None and args.latitude is not None: + doinline_from_latlong(args.latitude, args.longitude) + else: + LOGGER.error("You must specify longitude and latitude.") + else: + LOGGER.error("You must choose -u/--ups, OR -l/--latlong") + + +if __name__ == '__main__': + main() diff --git a/ph5/utilities/utm_latlong.py b/ph5/utilities/utm_latlong.py index 06dd2b8b..24be35dd 100644 --- a/ph5/utilities/utm_latlong.py +++ b/ph5/utilities/utm_latlong.py @@ -6,7 +6,7 @@ import argparse import logging -PROG_VERSION = '2020.066' +PROG_VERSION = '2020.091' LOGGER = logging.getLogger(__name__) @@ -14,7 +14,7 @@ def convert_file_from_utm(infile, outfile): # batch file method counter = 0 with open(outfile, "w") as theoutfile, open(infile) as theinfile: - ws = "Easting, Northing, Zone, Hemisphere, => Longitude, Latitude\n" + ws = "Northing, Easting, Zone, Hemisphere, => Latitude, Longitude\n" theoutfile.write(ws) for line in theinfile: counter = counter + 1 @@ -29,17 +29,17 @@ def convert_file_from_utm(infile, outfile): zone = int(fieldset[2]) hemisphere = str(fieldset[3]).upper() try: - lat, lon = ph5utils.utm_to_lat_long(easting, northing, - hemisphere, zone) - ps = 
"easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" - printstr1 = (ps % (easting, northing, zone, hemisphere)) - printstr2 = "Lon = %.7f, Lat = %.7f" % (lon, lat) - print (printstr1 + " => " + printstr2) + lat, lon = ph5utils.utm_to_lat_lon(northing, easting, + hemisphere, zone) + ps = "northing=%.1f, easting=%.1f, zone=%d, hemisphere=%s" + printstr1 = (ps % (northing, easting, zone, hemisphere)) + printstr2 = "Lat = %.7f, Lon = %.7f" % (lat, lon) + LOGGER.info(printstr1 + " => " + printstr2) outstr = "%.1f, %.1f, %d, %s, %.7f, %.7f" \ - % (easting, northing, zone, hemisphere, lon, lat) + % (northing, easting, zone, hemisphere, lat, lon) theoutfile.write(outstr + "\n") except ValueError: - print ("There was an error in UTM file conversion.") + LOGGER.error("There was an error in UTM data conversion.") return @@ -47,7 +47,7 @@ def convert_file_from_latlong(infile, outfile): # batch file method counter = 0 with open(outfile, "w") as theoutfile, open(infile) as theinfile: - ws = "Longitude, Latitude, =>Easting, Northing, Zone, Hemisphere\n" + ws = "Latitude, Longitude, =>Northing, Easting, Zone, Hemisphere\n" theoutfile.write(ws) for line in theinfile: counter = counter + 1 @@ -60,17 +60,17 @@ def convert_file_from_latlong(infile, outfile): lat = float(fieldset[0]) lon = float(fieldset[1]) try: - easting, northing, zone, hemisphere =\ - ph5utils.lat_long_to_utm(lat, lon) - ps = "easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" - printstr1 = "Lon = %.7f, Lat = %.7f" % (lon, lat) - printstr2 = (ps % (easting, northing, zone, hemisphere)) - print (printstr1 + " => " + printstr2) + northing, easting, zone, hemisphere =\ + ph5utils.lat_lon_to_utm(lat, lon) + ps = "northing=%.1f, easting=%.1f, zone=%d, hemisphere=%s" + printstr1 = "Lat = %.7f, Lon = %.7f" % (lat, lon) + printstr2 = (ps % (northing, easting, zone, hemisphere)) + LOGGER.info(printstr1 + " => " + printstr2) outstr = "%.7f, %.7f, %.1f, %.1f, %d, %s" \ - % (lon, lat, easting, northing, zone, hemisphere) + % (lat, lon, northing, easting, zone, hemisphere) theoutfile.write(outstr + "\n") except ValueError: - print ("There was an error in UTM file conversion.") + LOGGER.error("There was an error in UTM data conversion.") return @@ -79,36 +79,36 @@ def doinline_from_utm(easting, northing, zone, hemisphere): try: indat = ("Input: easting=%.1f, northing=%.1f, zone=%d, hemisphere=%s" % (float(easting), float(northing), int(zone), hemisphere)) - print (indat) + LOGGER.info(indat) - lat, lon = ph5utils.utm_to_lat_long(easting, northing, - hemisphere, zone) - outstr = "Lon = %.7f, Lat = %.7f" % (lon, lat) - print (outstr) - print (''.join('*'*50)) + lat, lon = ph5utils.utm_to_lat_lon(northing, easting, + hemisphere, zone) + outstr = "Lat = %.7f, Lon = %.7f" % (lat, lon) + LOGGER.info(outstr) + LOGGER.info(''.join('*'*50)) except ValueError: - print ("There was an error in UTM data conversion.") + LOGGER.error("There was an error in UTM data conversion.") return -def doinline_from_latlong(lon, lat): +def doinline_from_latlong(lat, lon): # interactive method try: if abs(float(lat)) > 90.0: - msg = "Geodetic Error, your latitude, %.7f, is out of limit." \ - % (float(lat)) - print (msg) + LOGGER.error( + "Your latitude, {0}, is out of limit." 
+ .format(lat)) return - easting, northing, zone, hemisphere = \ - ph5utils.lat_long_to_utm(lat, lon) - indat = "Input: lon=%.7f, lat=%.7f" % (float(lon), float(lat)) - print (indat) - outstr = "easting=%.1f, northing=%.1f, zone=%s, hemisphere=%s" \ - % (float(easting), float(northing), zone, hemisphere) - print (outstr) - print (''.join('*'*50)) + northing, easting, zone, hemisphere = \ + ph5utils.lat_lon_to_utm(float(lat), float(lon)) + indat = "Input: lat=%.7f, lon=%.7f" % (float(lat), float(lon)) + LOGGER.info(indat) + outstr = "northing =%.1f, easting =%.1f, zone =%s, hemisphere =%s" \ + % (float(northing), float(easting), zone, hemisphere) + LOGGER.info(outstr) + LOGGER.info(''.join('*'*50)) except ValueError: - print ("There was an error in UTM data conversion.") + LOGGER.error("There was an error in UTM data conversion.") return @@ -141,7 +141,7 @@ def main(): elif args.latlong == 1: convert_file_from_latlong(args.infile, args.outfile) else: - print ("Error, you must specify either -u/--utm, OR -l/--latlong") + LOGGER.error("You must specify either -u/--utm, OR -l/--latlong") # for direct method else: @@ -151,14 +151,14 @@ def main(): doinline_from_utm(args.easting, args.northing, args.zone, args.side.upper()) else: - print ("Error-you must specify easting, northing, zone, side") + LOGGER.error("You must specify northing,easting,zone, & side") elif args.latlong == 1: if args.longitude is not None and args.latitude is not None: - doinline_from_latlong(args.longitude, args.latitude) + doinline_from_latlong(args.latitude, args.longitude) else: - print ("Error, you must specify longitude and latitude.") + LOGGER.error("You must specify longitude and latitude.") else: - print ("Error, you must choose -u/--utm, OR -l/--latlong") + LOGGER.error("You must choose -u/--utm, OR -l/--latlong") if __name__ == '__main__':