# Use this workflow when all TIFF files are already downloaded
import sys
import os
from osgeo import gdal, ogr, osr
from Raster_Analysis import find_raster_summit
from LPC_Analysis import get_lpc_point
from Output import set_output_feature, tnm_api_download_raster, TNMAPIException
from Point import Point
from dataset import Dataset
import time
import pathlib
import Input
import traceback
from multiprocessing.pool import ThreadPool as Pool
from multiprocessing import Manager, Lock  # , Pool


def truncate(num, n):
    """Truncate ``num`` toward zero to ``n`` decimal places (no rounding)."""
    integer = int(num * (10**n)) / (10**n)
    return float(integer)


def get_bbox(dx, dy, in_srs):
    """Return a degenerate (zero-area) TNM bbox string for a single point.

    Args:
        dx: x coordinate of the point you want the bbox for
        dy: y coordinate of the point you want the bbox for
        in_srs: spatial reference the coordinates are expressed in

    Returns:
        "xmin,ymin,xmax,ymax" with min == max (a point bbox), in degrees.
        If ``in_srs`` is already degree-based the coordinates are used as-is;
        otherwise the point is reprojected to WGS84 first.
    """
    if str(in_srs.GetAttrValue("Unit")).upper() != "DEGREE":
        # Projected input (e.g. Web Mercator): reproject the point to WGS84.
        targetSR = osr.SpatialReference()
        targetSR.ImportFromEPSG(4326)  # WGS84
        # Keep lon/lat (x, y) ordering rather than GDAL 3's authority order.
        targetSR.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        trans_point = ogr.Geometry(ogr.wkbPoint)
        # BUG FIX: was AddPoint(dx, dx) — the y coordinate was never used,
        # which produced a bbox at the wrong latitude for projected inputs.
        trans_point.AddPoint(dx, dy)
        trans_point.AssignSpatialReference(in_srs)
        trans_point.TransformTo(targetSR)
        bbox_xmin = trans_point.GetX()
        bbox_xmax = trans_point.GetX()
        bbox_ymin = trans_point.GetY()
        bbox_ymax = trans_point.GetY()
    else:
        # Already geographic: build the point bbox directly.
        bbox_xmin = dx
        bbox_xmax = dx
        bbox_ymin = dy
        bbox_ymax = dy
    return "{},{},{},{}".format(bbox_xmin, bbox_ymin, bbox_xmax, bbox_ymax)


def process_pt(in_pt, output_layer, in_lock):
    """Locate the corrected summit for one input point and write it out.

    Starts with the 1/3 arc-second NED tile(s), iteratively downloading
    neighbouring tiles whenever the summit search walks off the current
    raster, then (when ``download`` is set) steps through progressively
    finer resolutions (5 m, then LPC) the same way.

    Args:
        in_pt: [x, y, field_dict, unique_id] list for the original summit
        output_layer: OGR layer the corrected feature is written to
        in_lock: lock shared by worker threads, guarding prints and the
            (non-thread-safe) OGR layer writes

    Returns:
        ``in_pt`` converted to a ``Point``, carrying the corrected location
        and the list of datasets used to find it.  On early failure the raw
        input list is returned unchanged.
    """
    try:
        in_pt = Point(in_pt[0], in_pt[1], in_pt[2], in_pt[3])
        print(f"Processing {in_pt.name}")
        # Output SRS: the summit search reports coordinates in this reference.
        srs = output_layer.GetSpatialRef()

        # Start with the coarsest product: 1/3 arc-second NED.
        resolution = "National Elevation Dataset (NED) 1/3 arc-second"
        dataset = Dataset(resolution, location=args.one_third_tile)
        in_pt.datasets.append(dataset)
        current_ds = in_pt.datasets[-1]

        # Find the summit on the locally available tile(s).
        dx = dy = raster_srs = added = None
        try:
            dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer, srs, in_lock)
        except Exception as e:
            with in_lock:
                print(str(e))

        # A non-None (dx, dy) means the search hit a tile edge: fetch the
        # adjacent tile and retry until the summit settles or no tile exists.
        while dx and dy and raster_srs:
            # bbox in lat/lon, derived from the raster-SRS pixel coordinates.
            bbox = get_bbox(dx, dy, raster_srs)
            new_dataset = tnm_api_download_raster(bbox, out_loc_13, resolution, in_lock,
                                                  True, args.debug, dataset.bad_location)
            if not new_dataset:
                break
            # Only record tiles we have not already searched.
            already_in = False
            for data_file in current_ds.location:
                if data_file == new_dataset.location[0]:
                    already_in = True
                    break
            if not already_in:
                in_pt.datasets[-1].location.append(new_dataset.location[0])
            try:
                dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer, srs, in_lock, added)
            except Exception as e:
                dx = dy = raster_srs = added = None
                with in_lock:
                    print(str(e))
                traceback.print_exc()

        # Continue with higher resolution data.
        if download:
            # TODO: Test next resolution multi-tile point finding
            bbox = in_pt.get_bbox(srs)
            resolution = in_pt.get_next_resolution()
            if resolution:
                # download data (5 m product; name is after the comma)
                dataset = Dataset(resolution.split(',')[1], location=args.five_meter_tile)
                in_pt.datasets.append(dataset)
                if dataset:
                    try:
                        dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer, srs, in_lock)
                    except Exception as e:
                        dx = dy = raster_srs = added = None
                        with in_lock:
                            print(str(e))
                        traceback.print_exc()
                    # Same edge-walking loop as the 1/3" pass above.
                    while dx and dy and raster_srs:
                        bbox = get_bbox(dx, dy, raster_srs)
                        # Try downloading files that aren't in the "bad list".
                        # This works for now, but if/when the 1 m product is
                        # wired up it will need its own block.
                        new_dataset = tnm_api_download_raster(bbox, out_loc_5m, resolution, in_lock,
                                                              True, args.debug, dataset.bad_location)
                        # NOTE(review): needs work — should retry from the
                        # original point with a bigger buffer instead of quitting.
                        if not new_dataset:
                            with in_lock:
                                print(f"No data found for {in_pt.name} at new point, returning to original pt and increasing buffer")
                            dx = dy = raster_srs = None
                            break
                        else:
                            already_in = False
                            for data_file in dataset.location:
                                if data_file == new_dataset.location[0]:
                                    already_in = True
                                    break
                            if not already_in:
                                in_pt.datasets[-1].location.append(new_dataset.location[0])
                            try:
                                dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer, srs, in_lock, added)
                            except Exception as e:
                                dx = dy = raster_srs = added = None
                                with in_lock:
                                    print(str(e))
                                traceback.print_exc()

                # TODO: re-work the LPC
                resolution = in_pt.get_next_resolution()
                if resolution == 'Lidar Point Cloud (LPC)':
                    pdal_bbox = in_pt.get_pdal_bbox()
                    # download data
                    # NOTE(review): argument list differs from the raster calls
                    # above (an SRS is passed where they pass a lock) — confirm
                    # against tnm_api_download_raster's signature.
                    dataset = tnm_api_download_raster(bbox, out_loc_lpc, resolution,
                                                      in_feature_class_srs, args.debug)
                    in_pt.datasets.append(dataset)
                    if in_pt.get_most_recent_data():
                        get_lpc_point(in_pt, in_pt.get_most_recent_data().location, pdal_bbox)
            else:
                with in_lock:
                    # BUG FIX: was in_pt.datasets.datasets[-1].type, which
                    # raises AttributeError — datasets is a plain list.
                    print("Last dataset resolution available for summit {0}: {1}".format(
                        in_pt.name, in_pt.datasets[-1].type))
    except TNMAPIException as e:
        with in_lock:
            print(e)
        traceback.print_exc()
    except Exception as e:
        with in_lock:
            print(e)
        traceback.print_exc()
    finally:
        # Runs even after an exception so partial results still get written.
        # Guard: if Point(...) itself failed above, in_pt is still the raw
        # input list and has no coordinate attributes.
        if isinstance(in_pt, Point):
            # Create the output feature.
            feature = ogr.Feature(output_layer.GetLayerDefn())
            # OGR layer writes are not thread-safe; serialize them.
            with in_lock:
                if in_pt.original_x and in_pt.original_y:
                    set_output_feature(feature, in_pt, output_layer)
                    print("Summit processed for: " + in_pt.name)
                else:
                    pass
                    # print("Process failed for: " + in_pt.name)
            feature = None
    return in_pt


if __name__ == '__main__':
    print("starting")
    start_time = time.time()

    # Stop GDAL printing both warnings and errors to STDERR
    # gdal.PushErrorHandler('CPLQuietErrorHandler')
    # Make GDAL raise python exceptions for errors (warnings won't raise)
    gdal.UseExceptions()
    gdal.AllRegister()

    # Parse arguments.  SystemExit from argparse propagates so the proper
    # exit code is preserved; any other parse failure exits quietly.
    try:
        args = Input.parse_input_args(sys.argv[1:])
    except Exception:
        sys.exit()
    buffer = args.buffer
    download = args.download

    # Location to save downloaded file(s)
    # out_loc = pathlib.Path(args.output_shapefile).parent.resolve()
    out_loc_13 = "full_test/TIFFs/13"
    out_loc_5m = "full_test/TIFFs/5m"
    out_loc_1m = "full_test/TIFFs/1m"
    out_loc_lpc = "full_test/TIFFs/lpc"

    # Get some info from the input feature class we will need later.
    in_fc_driver = ogr.GetDriverByName("ESRI Shapefile")
    in_fc_dataSource = in_fc_driver.Open(args.input_feature_class, 0)
    in_fc_layer = in_fc_dataSource.GetLayer()
    in_feature_class_srs = in_fc_layer.GetSpatialRef()
    in_fc_lyr_def = in_fc_layer.GetLayerDefn()

    # Create our output.
    out_ds = in_fc_driver.CreateDataSource(args.output_shapefile)
    # TODO: determine point type
    out_layer = out_ds.CreateLayer("Corrected_Summits", in_feature_class_srs, ogr.wkbPoint)
    # Copy fields from the input.
    for i in range(in_fc_lyr_def.GetFieldCount()):
        out_layer.CreateField(in_fc_lyr_def.GetFieldDefn(i))

    # Dict template mapping every output field name to None; copied per point.
    field_dict = {}
    for i in range(out_layer.GetLayerDefn().GetFieldCount()):
        field_dict[out_layer.GetLayerDefn().GetFieldDefn(i).GetName()] = None

    print("creating fields")
    # Analysis result fields (shapefile names are capped at 10 characters,
    # hence "LPC_Z_Valu" with the trailing 'e' dropped).
    for field_name, field_type in (
        ("Distance", ogr.OFTReal),
        ("Orig_Z", ogr.OFTReal),
        ("Orig_X", ogr.OFTReal),
        ("Orig_Y", ogr.OFTReal),
        ("One_3rd_Z", ogr.OFTReal),
        ("Five_M_Z", ogr.OFTReal),
        ("One_M_Z", ogr.OFTReal),
        ("LPC_Z_Valu", ogr.OFTReal),
        ("Final_z", ogr.OFTReal),
        ("Final_Res", ogr.OFTString),
        ("ZDiff", ogr.OFTReal),
        ("Final_X", ogr.OFTReal),
        ("Final_Y", ogr.OFTReal),
    ):
        out_layer.CreateField(ogr.FieldDefn(field_name, field_type))

    points = []
    # Loop through features and add to list of points.
    for feat in in_fc_layer:
        # Shallow copy of the field template, filled from this feature.
        point_dict = field_dict.copy()
        for i in range(feat.GetFieldCount()):
            point_dict[in_fc_lyr_def.GetFieldDefn(i).name] = feat.GetField(i)
        geom = feat.GetGeometryRef()
        if str(geom.GetGeometryName()) == "MULTIPOINT":
            # Multipoint features share one record; tag each vertex with the
            # feature's unique gaz_id so they can be matched up later.
            u_id = point_dict['gaz_id']
            for inner_geom in geom:
                points.append([inner_geom.GetX(), inner_geom.GetY(), point_dict, u_id])
        else:
            points.append([geom.GetX(), geom.GetY(), point_dict, None])

    if args.multi:
        # Multiprocess points (ThreadPool, so out_layer is shared safely
        # only because process_pt serializes writes behind in_lock).
        with Manager() as manager:
            in_lock = manager.Lock()
            feats_to_process = [[pt, out_layer, in_lock] for pt in points]
            p = Pool(6)
            processed = p.starmap(process_pt, feats_to_process)
            p.close()
            p.join()
    else:
        processed = []
        for pt in points:
            processed.append(process_pt(pt, out_layer, Lock()))

    if args.remove:
        # Best-effort cleanup of downloaded tiles; a failed removal (missing
        # file, or a list-valued location) is deliberately ignored.
        for proc in processed:
            for data in proc.datasets:
                try:
                    os.remove(data.location)
                except Exception:
                    pass

    out_ds = None  # flush/close the output datasource
    print("Analysis Complete, took %s seconds." % (time.time() - start_time))