# Plan: grab essentially the entirety of tnm_download_api and alter it so that, for each point, it first grabs
# all of the 1/3 arc-second data, then the 5 m data, and so on. Each data type gets downloaded into its own
# separately marked folder. Then I either write another script that merges them all together or I use QGIS to
# get it done. A script would be better in the sense that QGIS will break if there is an issue with a file,
# whereas gdal_merge will just skip that file. However, if I merge in QGIS I might not have to worry about
# warping to the right SRS; not sure. (A VRT-based merge sketch follows sort_funct below.)
# Next, modify process_pt so that instead of pinging an API every time, it sets the dataset to the related
# VRT/mosaic file I create for each data type.
# raster_analysis will also need a facelift, though hopefully this won't cause the same GNIS=NDV problems.
# Then, to run this all together, I add the iteration from summit_analysis's main to walk through each point
# and download its TIFFs. (A hypothetical driver sketch is at the bottom of this file.)

import datetime
import os
import time
import urllib.request

import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

# The TNM requests below use verify=False, so silence the resulting warnings.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


class NoData(Exception):
    """Raised when a TNM query returns zero products."""

    def __init__(self, url, message="No data found from "):
        self.url = url
        super().__init__(message + self.url)


class TNMAPIException(Exception):
    """Raised after repeated bad responses from the TNM API."""

    def __init__(self, response, message="TNM API Bad Response: "):
        self.response_code = str(response)
        super().__init__(message + self.response_code)


def sort_funct(e):
    """Sort key: a product's publication date, so items are processed oldest-first."""
    return datetime.datetime.strptime(e["publicationDate"], "%Y-%m-%d")
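
# The merge step from the plan above, as a minimal sketch using gdal.BuildVRT from the GDAL Python bindings.
# The function name build_vrts and the assumption that each folder holds exactly one data type's .tif files
# are mine, not part of the original script. Note that gdalbuildvrt skips (with a warning) inputs whose SRS
# differs from the first file's, which matches the "skip that file" behavior described above.
def build_vrts(folders):
    """Build one <folder>.vrt mosaic per download folder from the .tif files inside it."""
    from osgeo import gdal  # requires the GDAL Python bindings; only needed for this merge step
    vrts = []
    for folder in folders:
        tifs = [os.path.join(folder, f) for f in os.listdir(folder) if f.endswith(".tif")]
        if not tifs:
            continue  # no rasters here (e.g., a point-cloud folder holds only .laz files)
        vrt_path = folder.rstrip(os.sep) + ".vrt"
        vrt = gdal.BuildVRT(vrt_path, tifs)
        vrt = None  # dereference to flush the VRT to disk
        vrts.append(vrt_path)
    return vrts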
def download_all(bbox, out_loc, data_types=("National Elevation Dataset (NED) 1/3 arc-second",
                                            "Digital Elevation Model (DEM) 1 meter",
                                            "Alaska IFSAR 5 meter DEM",
                                            "Lidar Point Cloud (LPC)")):
    debug = True
    # The first two bbox values ("minX,minY,maxX,maxY") are treated as the point of interest.
    pointX, pointY, _, _ = bbox.split(',')
    url = "https://tnmaccess.nationalmap.gov/api/v1/products"
    for data_type, out in zip(data_types, out_loc):
        print(f"starting search for {data_type}")
        parameters = {"bbox": bbox, "datasets": data_type}
        num_bad_response = 0
        while True:
            try:
                response = requests.get(url, params=parameters, verify=False)
                if not response.ok:
                    num_bad_response += 1
                    if num_bad_response > 20:
                        raise TNMAPIException(response.status_code)
                    # Back off longer after a 400 than after other bad responses, then retry.
                    time.sleep(30 if response.status_code == 400 else 1)
                    continue
                if debug:
                    print("URL: " + response.request.url)
                try:
                    data = response.json()
                except Exception as bad_response:
                    print(bad_response)
                    return None
                if data['total'] == 0:
                    raise NoData(response.request.url)
                data['items'].sort(key=sort_funct)
                for item in data['items']:
                    # Only rasters (.tif) and point clouds (.laz) are wanted; skip everything else.
                    if not item['downloadURL'].endswith(('.tif', '.laz')):
                        continue
                    # Only download products whose bounding box actually contains the point.
                    if (float(item['boundingBox']['minX']) < float(pointX) < float(item['boundingBox']['maxX'])
                            and float(item['boundingBox']['minY']) < float(pointY) < float(item['boundingBox']['maxY'])):
                        download_url = item['downloadURL']
                        title = item['title'].replace(" ", "_").replace('1/3', '13')
                        ext = ".laz" if data_type == "Lidar Point Cloud (LPC)" else ".tif"
                        downloaded_raster = os.path.join(out, title + ext)
                        if not os.path.isfile(downloaded_raster):
                            print("Downloading dataset: " + downloaded_raster)
                            urllib.request.urlretrieve(download_url, downloaded_raster)
                        else:
                            print("Dataset: " + downloaded_raster + " found in output location.")
            except TNMAPIException as e:
                if debug:
                    print(e)
                break  # too many bad responses; give up on this data type
            except NoData as e:
                if debug:
                    print(e)
                break  # nothing available for this data type; move on
            except Exception:
                time.sleep(1)  # transient failure (network hiccup, bad download); retry
            else:
                break  # this data type finished cleanly
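
# The driver described at the end of the plan, as a hypothetical sketch: the example point, folder names,
# and the 0.02-degree box padded onto each point are illustrative assumptions, not code taken from
# summit_analysis. download_all reads the first two bbox values as the point itself, so the point is used
# as the box's min corner here.
if __name__ == "__main__":
    points = [(-105.6156, 40.2549)]  # (lon, lat) pairs to process; placeholder data
    folders = ["ned_13", "dem_1m", "ifsar_5m", "lpc"]  # one folder per entry in data_types, same order
    for folder in folders:
        os.makedirs(folder, exist_ok=True)
    for lon, lat in points:
        pad = 0.02
        bbox = f"{lon},{lat},{lon + pad},{lat + pad}"  # "minX,minY,maxX,maxY"
        download_all(bbox, folders)
    # Merge each raster data type into a single VRT mosaic; the .laz point clouds are left alone.
    build_vrts(folders[:-1])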