#
# Test for the Landsat STAC server (CMR-STAC / LP DAAC HLS)
import geopandas as gpd
import requests as r
import boto3
import rasterio as rio
import rioxarray
import os
from rasterio.session import AWSSession

def BuildSquare(lon, lat, delta):
    # Build a GeoJSON polygon: a square of half-width `delta` degrees
    # centered on (lon, lat), closed by repeating the first corner.
    c1 = [lon + delta, lat + delta]
    c2 = [lon + delta, lat - delta]
    c3 = [lon - delta, lat - delta]
    c4 = [lon - delta, lat + delta]
    geometry = {"type": "Polygon", "coordinates": [[ c1, c2, c3, c4, c1 ]]}
    return geometry
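
# A quick illustration (hypothetical coordinates, not used below):
# BuildSquare(-122.0, 45.0, 0.1) returns a closed GeoJSON ring, e.g.
# {"type": "Polygon", "coordinates": [[[-121.9, 45.1], [-121.9, 44.9],
#                                      [-122.1, 44.9], [-122.1, 45.1],
#                                      [-121.9, 45.1]]]}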


s3_cred_endpoint = {
    'lpdaac': 'https://data.lpdaac.earthdatacloud.nasa.gov/s3credentials'
}

def get_temp_creds(provider):
    # Request temporary S3 credentials for direct in-region access to the
    # provider's cloud holdings (requires Earthdata Login, e.g. via ~/.netrc)
    return r.get(s3_cred_endpoint[provider]).json()
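
# The JSON returned by the s3credentials endpoint is expected to contain at
# least the keys consumed later in this script ('accessKeyId',
# 'secretAccessKey', 'sessionToken'); additional fields such as an expiration
# timestamp may also be present (assumption based on typical
# temporary-credential responses).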



###############################################################################
# MAIN
###############################################################################

if __name__ == '__main__':

    stac = 'https://cmr.earthdata.nasa.gov/stac/'   # CMR-STAC API endpoint
    stac_response = r.get(stac).json()              # Call the STAC API endpoint

    # for s in stac_response: print(s)

    print(f"You are now using the {stac_response['id']} API (STAC Version: {stac_response['stac_version']}). \n{stac_response['description']}")
    print(f"There are {len(stac_response['links'])} STAC catalogs available in CMR.")

    stac_lp = [s for s in stac_response['links'] if 'LP' in s['title']]   # Search for only LP-specific catalogs

    # LPCLOUD is the STAC catalog we will be using and exploring today
    lp_cloud = r.get([s for s in stac_lp if s['title'] == 'LPCLOUD'][0]['href']).json()
    # for l in lp_cloud: print(f"{l}: {lp_cloud[l]}")

    lp_links = lp_cloud['links']
    for l in lp_links:
        try:
            print(f"{l['href']} is the {l['title']}")
        except KeyError:   # Some links do not carry a 'title'
            print(f"{l['href']}")


    lp_collections = [l['href'] for l in lp_links if l['rel'] == 'collections'][0]   # Set collections endpoint to variable
    collections_response = r.get(f"{lp_collections}").json()                         # Call collections endpoint
    print(f"{collections_response['description']} ({len(collections_response['collections'])} collections available)")

    collections = collections_response['collections']
    # print(collections[1])

    # Search available version 2 collections for HLS and print them out
    hls_collections = [c for c in collections if 'HLS' in c['id'] and 'v2' in c['id']]
    for h in hls_collections:
        print(f"{h['title']} has an ID (shortname) of: {h['id']}")

    l30 = [h for h in hls_collections if 'HLSL30' in h['id'] and 'v2.0' in h['id']][0]   # Grab the HLSL30 collection
    for l in l30['extent']:   # Check out the extent of this collection
        print(f"{l}: {l30['extent'][l]}")

    l30_id = 'HLSL30.v2.0'
    print(f"HLS L30 Start Date is: {l30['extent']['temporal']['interval'][0][0]}")

    l30_items = [l['href'] for l in l30['links'] if l['rel'] == 'items'][0]   # Set items endpoint to variable
    print(l30_items)
    l30_items_response = r.get(f"{l30_items}").json()   # Call items endpoint
    # print(l30_items_response)
    # l30_item = l30_items_response['features'][0]   # select first item (10 items returned by default)
    # print(l30_item)

    # for i, l in enumerate(l30_items_response['features']):
    #     print(f"Item at index {i} is {l['id']}")
    #     print(f"Item at index {i} is {l['properties']['eo:cloud_cover']}% cloudy.")

    lp_search = [l['href'] for l in lp_links if l['rel'] == 'search'][0]   # Define the search endpoint
    # lp_search is https://cmr.earthdata.nasa.gov/stac/LPCLOUD/search

    # Set up a dictionary that will be used to POST requests to the search endpoint
    params = {}

    lim = 100
    params['limit'] = lim   # Add in a limit parameter to retrieve 100 items at a time
    print(params)
    search_response = r.post(lp_search, json=params).json()   # Send POST request to retrieve the first 100 items in the STAC collection
    print(f"{len(search_response['features'])} items found!")
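
    # Note: 'limit' only caps the page size. If more matching items exist, the
    # response should carry a link with rel == 'next' pointing at the following
    # page (standard STAC item-search paging). A rough, untested sketch -- the
    # exact paging mechanics of CMR-STAC (e.g. whether the next link carries its
    # own method/body) are an assumption here:
    # features = list(search_response['features'])
    # next_links = [l for l in search_response.get('links', []) if l.get('rel') == 'next']
    # while next_links:
    #     page = r.post(next_links[0]['href'], json=params).json()
    #     features.extend(page['features'])
    #     next_links = [l for l in page.get('links', []) if l.get('rel') == 'next']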

    # Bring in the farm field region of interest
    field = gpd.read_file('/home/elidwa/hls-tutorial/Field_Boundary.geojson')
    print(field)
    fieldShape = field['geometry'][0]   # Define the geometry as a shapely polygon
    bbox = f'{fieldShape.bounds[0]},{fieldShape.bounds[1]},{fieldShape.bounds[2]},{fieldShape.bounds[3]}'   # Defined from ROI bounds
    params['bbox'] = bbox               # Add ROI to params
    date_time = "2021-07-01T00:00:00Z/2021-08-31T23:59:59Z"   # Define the start/end of the time period
    params['datetime'] = date_time
    params['collections'] = [l30_id]    # The STAC item search expects a list of collection IDs
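
    # At this point the search body looks like the following (bbox values come
    # from the field geometry, shown here as placeholders):
    # {
    #     'limit': 100,
    #     'bbox': '<xmin>,<ymin>,<xmax>,<ymax>',
    #     'datetime': '2021-07-01T00:00:00Z/2021-08-31T23:59:59Z',
    #     'collections': ['HLSL30.v2.0']
    # }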

    hls_items = r.post(lp_search, json=params).json()   # Send POST request with bbox, datetime, and collections included
    print(f"{len(hls_items['features'])} items found!")
    hls_items = hls_items['features']

    h = hls_items[0]
    # print(h)

    evi_band_links = []

    # Define which HLS product is being accessed
    # if h['assets']['browse']['href'].split('/')[4] == 'HLSS30.015':
    #     evi_bands = ['B8A', 'B04', 'B02', 'Fmask']   # NIR, Red, Blue, Quality for S30
    # else:
    #     evi_bands = ['B05', 'B04', 'B02', 'Fmask']   # NIR, Red, Blue, Quality for L30

    evi_bands = ['B05', 'B04', 'B02', 'Fmask']   # NIR, Red, Blue, Quality for L30

    # Subset the assets in the item down to only the desired bands
    for a in h['assets']:
        if a in evi_bands:
            evi_band_links.append(h['assets'][a]['href'])
    for e in evi_band_links:
        print(e)
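
    # The band subset above matches the inputs of the standard Enhanced
    # Vegetation Index, EVI = 2.5 * (NIR - Red) / (NIR + 6*Red - 7.5*Blue + 1),
    # plus the Fmask quality layer. This script only locates and opens the
    # files; the index itself is not computed here.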

    # The result of this search is:
    # https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T10TEK.2021183T185121.v2.0/HLS.L30.T10TEK.2021183T185121.v2.0.B02.tif
    # https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T10TEK.2021183T185121.v2.0/HLS.L30.T10TEK.2021183T185121.v2.0.Fmask.tif
    # https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T10TEK.2021183T185121.v2.0/HLS.L30.T10TEK.2021183T185121.v2.0.B04.tif
    # https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T10TEK.2021183T185121.v2.0/HLS.L30.T10TEK.2021183T185121.v2.0.B05.tif

    # Create an s3:// path list by rewriting the HTTPS URLs to their S3 equivalents
    s3List = []

    for e in evi_band_links:
        # print(e)
        s3path = e.replace("https://data.lpdaac.earthdatacloud.nasa.gov/", "s3://")
        # print(s3path)
        s3List.append(s3path)

    for e in s3List:
        print(e)


    if os.path.isfile(os.path.expanduser('~/.netrc')):
        # For GitHub CI, we can use ~/.netrc
        temp_creds_req = get_temp_creds('lpdaac')
    else:
        # ADD temporary credentials here
        temp_creds_req = {}

    session = boto3.Session(aws_access_key_id=temp_creds_req['accessKeyId'],
                            aws_secret_access_key=temp_creds_req['secretAccessKey'],
                            aws_session_token=temp_creds_req['sessionToken'],
                            region_name='us-west-2')
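
    # The temporary credentials are scoped to in-region (us-west-2) S3 access
    # and expire after a limited time (roughly an hour is typical -- assumption),
    # so a long-running job would need to refresh them.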


    # NOTE: Using rioxarray assumes you are accessing a GeoTIFF
    rio_env = rio.Env(AWSSession(session),
                      GDAL_DISABLE_READDIR_ON_OPEN='TRUE',
                      GDAL_HTTP_COOKIEFILE=os.path.expanduser('~/cookies.txt'),
                      GDAL_HTTP_COOKIEJAR=os.path.expanduser('~/cookies.txt'))
    rio_env.__enter__()
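    # GDAL_DISABLE_READDIR_ON_OPEN avoids listing the remote directory on every
    # open, and the cookie file/jar settings let GDAL keep the Earthdata Login
    # session cookies across the redirects used by the LP DAAC endpoints.
    # Calling __enter__() directly keeps the environment active for the rest of
    # the script instead of scoping it to a `with` block.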


    for e in s3List:
        print(e)
        if '.tif' in e:
            da = rioxarray.open_rasterio(e)
            print(da)
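            # A possible next step (not done here) would be to clip each band to
            # the field boundary with rioxarray, e.g.:
            #     clipped = da.rio.clip(field.geometry.values, field.crs)
            # (untested sketch; assumes rioxarray reprojects the clip geometry
            # to the raster's CRS when given its source CRS)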


    rio_env.__exit__(None, None, None)   # Close the rasterio AWS environment entered above

    print("Done!")