# Use this workflow if the TIFF files haven't been downloaded
import sys
import os
from osgeo import gdal, ogr, osr
from Raster_Analysis_Download import find_raster_summit
from LPC_Analysis import get_lpc_point
from Output import set_output_feature, tnm_api_download_raster, TNMAPIException
from Point import Point
import time
import pathlib
import Input
import traceback
from multiprocessing.pool import ThreadPool as Pool
from multiprocessing import Manager, Lock  # , Pool
from download_all_tiffs import download_all


def truncate(num, n):
    """Truncate num to n decimal places (toward zero) and return it as a float."""
    integer = int(num * (10**n)) / (10**n)
    return float(integer)


def get_bbox(dx, dy, in_srs):
    """Build a degenerate (zero-area) point bbox string in lon/lat order.

    Args:
        dx: x coordinate of point you want the bbox for
        dy: y coordinate of point you want the bbox for
        in_srs: Input spatial reference

    Returns:
        "xmin,ymin,xmax,ymax" string; coordinates are transformed to WGS84
        when the input SRS is not already in degrees.
    """
    # Transform projected coordinates (units not in degrees) to WGS84 so the
    # bbox can be handed to the TNM API.
    if str(in_srs.GetAttrValue("Unit")).upper() != "DEGREE":
        targetSR = osr.SpatialReference()
        targetSR.ImportFromEPSG(4326)  # WGS84
        targetSR.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        trans_point = ogr.Geometry(ogr.wkbPoint)
        # BUG FIX: original passed (dx, dx); the second argument is the
        # y coordinate and must be dy.
        trans_point.AddPoint(dx, dy)
        trans_point.AssignSpatialReference(in_srs)
        trans_point.TransformTo(targetSR)
        bbox_xmin = trans_point.GetX()
        bbox_xmax = trans_point.GetX()
        bbox_ymin = trans_point.GetY()
        bbox_ymax = trans_point.GetY()
    else:
        # Already in degrees: the point is its own bbox.
        bbox_xmin = dx
        bbox_xmax = dx
        bbox_ymin = dy
        bbox_ymax = dy
    return "{},{},{},{}".format(bbox_xmin, bbox_ymin, bbox_xmax, bbox_ymax)


def process_pt(in_pt, output_layer, in_lock):
    """Download elevation data for one summit and locate its corrected position.

    Args:
        in_pt: [x, y, field_dict, multi_id] list describing the original summit
        output_layer: OGR output layer the corrected feature is written to
        in_lock: shared lock serializing prints/downloads across workers

    Returns:
        in_pt as a Point with information about the location of the new
        summit and the datasets that were used to find it.
    """
    try:
        in_pt = Point(in_pt[0], in_pt[1], in_pt[2], in_pt[3])
        # Get the 1/3 dataset from tnm
        with in_lock:
            print("Processing summit: " + in_pt.name)
        # Get points bbox based on most recent data
        srs = output_layer.GetSpatialRef()
        bbox = in_pt.get_bbox(srs)
        # Get the dataset object
        resolution = "National Elevation Dataset (NED) 1/3 arc-second"
        dataset = tnm_api_download_raster(bbox, out_loc, resolution, in_lock,
                                          False, args.debug)
        # Add dataset object to list of points datasets
        if not dataset:
            print("Dataset list index out of range issue")
        else:
            in_pt.datasets.append(dataset)
        # NOTE(review): if the first download failed and datasets is empty this
        # raises IndexError, which is caught by the outer except below.
        current_ds = in_pt.datasets[-1]
        # Find the summit
        dx = dy = raster_srs = added = None
        try:
            dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer, srs, in_lock)
        except Exception as e:
            with in_lock:
                print(str(e))
        # BUG FIX: compare against None instead of truthiness so a legitimate
        # 0.0 coordinate does not end the multi-tile search early.
        while dx is not None and dy is not None and raster_srs is not None:
            # Get bbox from pixel coordinates to try and pull next tile
            # (returned in raster SRS, used to transform to lat/lon).
            bbox = get_bbox(dx, dy, raster_srs)
            # use bbox to query TNM for a tile that it intersects
            new_dataset = tnm_api_download_raster(bbox, out_loc, resolution, in_lock,
                                                  True, args.debug, dataset.bad_location)
            if not new_dataset:
                break
            # if the return has a different tile name we want to store it to check
            if new_dataset.location[0] not in current_ds.location:
                in_pt.datasets[-1].location.append(new_dataset.location[0])
            try:
                dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer, srs,
                                                               in_lock, added)
            except Exception as e:
                dx = dy = raster_srs = added = None
                with in_lock:
                    print(str(e))
                    traceback.print_exc()
        # continue with higher resolution data?
        if download:
            # TODO: Test next resolution multi-tile point finding
            # Get new bbox based on newest data
            bbox = in_pt.get_bbox(srs)
            # Check for next resolution
            resolution = in_pt.get_next_resolution()
            if resolution:
                # download data
                dataset = tnm_api_download_raster(bbox, out_loc, resolution, in_lock,
                                                  False, args.debug)
                if dataset:
                    # BUG FIX: only append real datasets (the original appended
                    # before the check, so a failed download stored None —
                    # inconsistent with the 1/3 arc-second handling above).
                    in_pt.datasets.append(dataset)
                    # Find the summit
                    try:
                        dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer,
                                                                       srs, in_lock)
                    except Exception as e:
                        dx = dy = raster_srs = added = None
                        with in_lock:
                            print(str(e))
                            traceback.print_exc()
                    while dx is not None and dy is not None and raster_srs is not None:
                        bbox = get_bbox(dx, dy, raster_srs)
                        # Try downloading files that aren't in the "bad list"
                        new_dataset = tnm_api_download_raster(bbox, out_loc, resolution,
                                                              in_lock, True, args.debug,
                                                              dataset.bad_location)
                        # if the return has a different tile name we want to store it to check
                        if new_dataset:
                            if new_dataset.location[0] not in dataset.location:
                                in_pt.datasets[-1].location.append(new_dataset.location[0])
                        try:
                            dx, dy, raster_srs, added = find_raster_summit(in_pt, buffer,
                                                                           srs, in_lock, added)
                        except Exception as e:
                            dx = dy = raster_srs = added = None
                            with in_lock:
                                print(str(e))
                                traceback.print_exc()
                    resolution = in_pt.get_next_resolution()
                    # TODO: re work the LPC
                    if resolution == 'Lidar Point Cloud (LPC)':
                        pdal_bbox = in_pt.get_pdal_bbox()
                        # download data
                        # BUG FIX: made the arguments consistent with every other
                        # call site (the original passed in_feature_class_srs where
                        # the lock belongs and omitted the boolean flag).
                        dataset = tnm_api_download_raster(bbox, out_loc, resolution,
                                                          in_lock, False, args.debug)
                        # Add dataset object to list of points datasets
                        in_pt.datasets.append(dataset)
                        if in_pt.get_most_recent_data():
                            get_lpc_point(in_pt, in_pt.get_most_recent_data().location,
                                          pdal_bbox)
            else:
                # No further resolution to try for this summit.
                # NOTE(review): this else pairing was reconstructed from a
                # collapsed source file — verify against the original layout.
                with in_lock:
                    # BUG FIX: original read in_pt.datasets.datasets[-1].type
                    # (doubled attribute); datasets is a plain list.
                    print("Last dataset resolution available for summit {0}: {1}".format(
                        in_pt.name, in_pt.datasets[-1].type))
    except TNMAPIException as e:
        with in_lock:
            print(e)
            traceback.print_exc()
    except Exception as e:
        with in_lock:
            print(e)
            traceback.print_exc()
    finally:
        # Runs even after an exception so every summit yields an output feature.
        # Create the output feature
        feature = ogr.Feature(output_layer.GetLayerDefn())
        # Set the data for the output feature and create it
        with in_lock:
            # BUG FIX: if Point() construction itself raised, in_pt is still the
            # raw input list and has no original_x/original_y attributes — the
            # original code crashed inside finally in that case.
            if isinstance(in_pt, Point) and in_pt.original_x and in_pt.original_y:
                set_output_feature(feature, in_pt, output_layer)
                print("Summit processed for: " + in_pt.name)
            else:
                name = in_pt.name if isinstance(in_pt, Point) else str(in_pt)
                print("Process failed for: " + name)
                # BUG FIX: open in append mode; 'w' truncated the log so each
                # failed summit overwrote all previous ones.
                with open("no_orig_coords.txt", 'a') as file:
                    file.write(name + '\n')
        feature = None
    return in_pt


if __name__ == '__main__':
    start_time = time.time()
    # Stop GDAL printing both warnings and errors to STDERR
    # gdal.PushErrorHandler('CPLQuietErrorHandler')
    # Make GDAL raise python exceptions for errors (warnings won't raise an exception)
    gdal.UseExceptions()
    gdal.AllRegister()
    # Parse arguments
    try:
        args = Input.parse_input_args(sys.argv[1:])
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        sys.exit()
    buffer = args.buffer
    download = args.download
    # Location to save downloaded file(s)
    # out_loc = pathlib.Path(args.output_shapefile).parent.resolve()
    out_loc = "full_test/TIFFs"
    # Get some info from the input feature class we will need later
    in_fc_driver = ogr.GetDriverByName("ESRI Shapefile")
    in_fc_dataSource = in_fc_driver.Open(args.input_feature_class, 0)
    in_fc_layer = in_fc_dataSource.GetLayer()
    in_feature_class_srs = in_fc_layer.GetSpatialRef()
    in_fc_lyr_def = in_fc_layer.GetLayerDefn()
    # Create our output:
    out_ds = in_fc_driver.CreateDataSource(args.output_shapefile)
    # TODO: determine point type
    out_layer = out_ds.CreateLayer("Corrected_Summits", in_feature_class_srs, ogr.wkbPoint)
    # Copy fields from the input
    for i in range(in_fc_lyr_def.GetFieldCount()):
        out_layer.CreateField(in_fc_lyr_def.GetFieldDefn(i))
    # create dict structure to hold point values
    field_dict = {}
    for i in range(out_layer.GetLayerDefn().GetFieldCount()):
        field_dict[out_layer.GetLayerDefn().GetFieldDefn(i).GetName()] = None
    print("creating fields")
    # Create a distance field plus the analysis result fields
    out_layer.CreateField(ogr.FieldDefn("Distance", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("Orig_Z", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("Orig_X", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("Orig_Y", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("13_Z_Value", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("5m_Z_Value", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("1m_Z_Value", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("LPC_Z_Valu", ogr.OFTReal))  # e left off name for 10 char limit
    out_layer.CreateField(ogr.FieldDefn("final_z", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("Final_Res", ogr.OFTString))
    out_layer.CreateField(ogr.FieldDefn("ZDiff", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("final_X", ogr.OFTReal))
    out_layer.CreateField(ogr.FieldDefn("final_Y", ogr.OFTReal))
    points = []
    # Loop through features and add to list of points
    for feat in in_fc_layer:
        # Shallow copy fields from input layerdefn with empty values
        point_dict = field_dict.copy()
        # Iterate through fields and fill with feature values
        for i in range(feat.GetFieldCount()):
            point_dict[in_fc_lyr_def.GetFieldDefn(i).name] = feat.GetField(i)
        geom = feat.GetGeometryRef()
        if str(geom.GetGeometryName()) == "MULTIPOINT":
            # find unique ID so the multipoint members stay associated
            u_id = point_dict['gaz_id']
            for inner_geom in geom:
                points.append([inner_geom.GetX(), inner_geom.GetY(), point_dict, u_id])
        else:
            points.append([geom.GetX(), geom.GetY(), point_dict, None])
    if args.multi:
        # multi-process points (ThreadPool: workers share the OGR layer handle)
        with Manager() as manager:
            in_lock = manager.Lock()
            feats_to_process = [[pt, out_layer, in_lock] for pt in points]
            p = Pool(6)
            processed = p.starmap(process_pt, feats_to_process)
            p.close()
            p.join()
    else:
        processed = []
        # Single-process path: only pre-download the tiles for each point.
        # processed.append(process_pt(pt, out_layer, Lock()))
        for pt in points:
            base = "full_test/TIFFs/"
            temp = Point(pt[0], pt[1], pt[2], pt[3])
            srs = out_layer.GetSpatialRef()
            bbox = temp.get_bbox(srs)
            print(f"\nDownloading tiles for {pt[2]['gaz_name']}")
            download_all(bbox, [base + '13', base + '1m', base + '5m', base + 'lpc'])
    if args.remove:
        # Best-effort cleanup of the downloaded rasters.
        for proc in processed:
            for data in proc.datasets:
                try:
                    os.remove(data.location)
                except OSError:
                    pass
    out_ds = None
    print("Analysis Complete, took %s seconds." % (time.time() - start_time))