Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ jobs:
id: 'auth'
uses: 'google-github-actions/auth@v2'
with:
project_id: 'openet'
create_credentials_file: true
workload_identity_provider: 'projects/470570065811/locations/global/workloadIdentityPools/gitaction-pool/providers/gitaction-provider'
service_account: 'github-actions@openet.iam.gserviceaccount.com'
Expand Down
60 changes: 36 additions & 24 deletions openet/core/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,17 +174,19 @@ def sentinel2_sr_cloud_mask(input_img):
return cloud_mask.Not().rename(['cloud_mask'])


def landsat_c2_sr_lst_correct(sr_image, ndvi):
def landsat_c2_sr_lst_correct(input_img, ndvi=None):
"""Apply correction to Collection 2 LST using adjusted ASTER emissivity

Parameters
----------
sr_image : ee.Image
input_img : ee.Image
Image from a Landsat Collection 2 SR image collection
with the SPACECRAFT_ID and LANDSAT_PRODUCT_ID metadata properties
(e.g. LANDSAT/LC08/C02/T1_L2).
(e.g. LANDSAT/LC08/C02/T1_L2). The image itself is not read in this
function but is instead used to select from the Level 2 and TOA collections.
ndvi : ee.Image
Normalized difference vegetation index (NDVI)
This parameter is deprecated and NDVI will be computed internally in the function,
but leaving to support backwards compatibility.

Returns
-------
Expand All @@ -206,10 +208,10 @@ def landsat_c2_sr_lst_correct(sr_image, ndvi):
Hulley, G. 2023.

"""
spacecraft_id = ee.String(sr_image.get('SPACECRAFT_ID'))
spacecraft_id = ee.String(input_img.get('SPACECRAFT_ID'))

# Landsat image geometry for clipping ASTER GED
image_geom = sr_image.geometry()
image_geom = input_img.geometry()
image_extent = image_geom.bounds(1, 'EPSG:4326')

# Server side approach for getting image extent snapped to the ASTER GED grid
Expand Down Expand Up @@ -257,7 +259,7 @@ def landsat_c2_sr_lst_correct(sr_image, ndvi):
'LANDSAT_8': 0.0584, 'LANDSAT_9': 0.0457,
})

def get_matched_c2_t1_image(input_img):
def get_matched_c2_t1_l2_image(input_img):
# Find matching Landsat Collection 2 Tier 1 Level 2 image
# based on the "LANDSAT_PRODUCT_ID" property
# Build the system:index format scene ID from the LANDSAT_PRODUCT_ID
Expand All @@ -284,7 +286,7 @@ def get_matched_c2_t1_image(input_img):
)

def get_matched_c2_t1_radiance_image(input_img):
# Find matching Landsat Collection 2 Tier 1 Level 2 image
# Find matching Landsat Collection 2 Tier 1 radiance image
# based on the "LANDSAT_PRODUCT_ID" property
# Build the system:index format scene ID from the LANDSAT_PRODUCT_ID
satellite = ee.String(input_img.get('SPACECRAFT_ID'))
Expand Down Expand Up @@ -349,10 +351,21 @@ def get_matched_c2_t1_radiance_image(input_img):
})
)

# Rebuilding the coll2 image here since the extra bands needed for the calculation
# will likely have been dropped or excluded before getting to this function
coll2 = get_matched_c2_t1_image(sr_image)
coll2RT = get_matched_c2_t1_radiance_image(sr_image)
# Rebuilding the coll2 image here from the LANDSAT_PRODUCT_ID
# since the extra bands needed for the calculation will likely
# have been dropped or excluded before getting to this function
c02_lvl2_img = get_matched_c2_t1_l2_image(input_img)
c02_rad_img = get_matched_c2_t1_radiance_image(input_img)

# Compute NDVI from the Level 2 SR image
# Including the global surface water maximum extent to limit the water mask
# to only those areas that have been flagged as water at some point in time
# which should help remove shadows that are misclassified as water
ndvi = landsat.c02_sr_ndvi(
sr_img=landsat.c02_l2_sr(c02_lvl2_img),
water_mask=landsat.c02_qa_water_mask(c02_lvl2_img),
gsw_extent_flag=True
)

# Apply Allen-Kilic Eq. 5 to calc. ASTER emiss. for Landsat
# This is Eq. 4 of Malakar et al., 2018
Expand Down Expand Up @@ -398,27 +411,26 @@ def get_matched_c2_t1_radiance_image(input_img):
# Using the ASTER-based soil emissivity from above
# The following estimate for emissivity to use with Landsat may need to be clamped
# to some predefined safe limits (for example, 0.5 and 1.0).
# CGM - Is the .multiply(1.0) needed?
fc_landsat = ndvi.multiply(1.0).subtract(0.15).divide(0.65).clamp(0, 1.0)
fc_landsat = ndvi.subtract(0.15).divide(0.65).clamp(0, 1.0)

# calc_smoothed_em_soil
LS_EM = fc_landsat.multiply(-1).add(1).multiply(em_soil).add(fc_landsat.multiply(veg_emis))
ls_em = fc_landsat.multiply(-1).add(1).multiply(em_soil).add(fc_landsat.multiply(veg_emis))

# Apply Eq. 8 to get thermal surface radiance, Rc, from C2 Real time band 10
# (Eq. 7 of Malakar et al. but without the emissivity to produce actual radiance)
Rc = (
coll2RT.select(['thermal'])
.multiply(ee.Number(coll2RT.get('RADIANCE_MULT_BAND_thermal')))
.add(ee.Number(coll2RT.get('RADIANCE_ADD_BAND_thermal')))
.subtract(coll2.select(['ST_URAD']).multiply(0.001))
.divide(coll2.select(['ST_ATRAN']).multiply(0.0001))
.subtract(LS_EM.multiply(-1).add(1).multiply(coll2.select(['ST_DRAD']).multiply(0.001)))
rc = (
c02_rad_img.select(['thermal'])
.multiply(ee.Number(c02_rad_img.get('RADIANCE_MULT_BAND_thermal')))
.add(ee.Number(c02_rad_img.get('RADIANCE_ADD_BAND_thermal')))
.subtract(c02_lvl2_img.select(['ST_URAD']).multiply(0.001))
.divide(c02_lvl2_img.select(['ST_ATRAN']).multiply(0.0001))
.subtract(ls_em.multiply(-1).add(1).multiply(c02_lvl2_img.select(['ST_DRAD']).multiply(0.001)))
)

# Apply Eq. 7 to convert Rs to LST (similar to Malakar et al., but with emissivity)
return (
LS_EM.multiply(ee.Number(k1.get(spacecraft_id)))
.divide(Rc).add(1.0).log().pow(-1)
ls_em.multiply(ee.Number(k1.get(spacecraft_id)))
.divide(rc).add(1.0).log().pow(-1)
.multiply(ee.Number(k2.get(spacecraft_id)))
.rename('lst')
)
144 changes: 144 additions & 0 deletions openet/core/export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
import logging

import ee

from . import utils


def mgrs_export_tiles(
        study_area_coll_id,
        mgrs_coll_id,
        study_area_property=None,
        study_area_features=None,
        mgrs_tiles=None,
        mgrs_skip_list=None,
        utm_zones=None,
        wrs2_tiles=None,
        mgrs_property='mgrs',
        utm_property='utm',
        wrs2_property='wrs2',
        cell_size=30,
):
    """Select MGRS tiles and metadata that intersect the study area geometry

    Parameters
    ----------
    study_area_coll_id : str
        Study area feature collection asset ID.
    mgrs_coll_id : str
        MGRS feature collection asset ID.
    study_area_property : str, optional
        Property name to use for inList() filter call of study area collection.
        Filter will only be applied if both 'study_area_property' and
        'study_area_features' parameters are both set.
    study_area_features : list, optional
        List of study area feature property values to filter on.
    mgrs_tiles : list, optional
        User defined MGRS tile subset.
    mgrs_skip_list : list, optional
        User defined list of MGRS tiles to skip.
    utm_zones : list, optional
        User defined UTM zone subset.
    wrs2_tiles : list, optional
        User defined WRS2 tile subset.
    mgrs_property : str, optional
        MGRS property in the MGRS feature collection (the default is 'mgrs').
    utm_property : str, optional
        UTM zone property in the MGRS feature collection (the default is 'utm').
    wrs2_property : str, optional
        WRS2 property in the MGRS feature collection (the default is 'wrs2').
    cell_size : float, optional
        Cell size for transform and shape calculation (the default is 30).

    Returns
    -------
    list of dicts: export information

    """
    # Avoid mutable default arguments; treat None the same as an empty list
    study_area_features = [] if study_area_features is None else study_area_features
    mgrs_tiles = [] if mgrs_tiles is None else mgrs_tiles
    mgrs_skip_list = [] if mgrs_skip_list is None else mgrs_skip_list
    utm_zones = [] if utm_zones is None else utm_zones
    wrs2_tiles = [] if wrs2_tiles is None else wrs2_tiles

    # Build and filter the study area feature collection
    logging.debug('Building study area collection')
    logging.debug(f'  {study_area_coll_id}')
    study_area_coll = ee.FeatureCollection(study_area_coll_id)
    if ((study_area_property == 'STUSPS') and
            ('CONUS' in [x.upper() for x in study_area_features])):
        # Expand the "CONUS" keyword into the lower-48 state codes
        # Exclude AK, HI, AS, GU, PR, MP, VI, (but keep DC)
        study_area_features = [
            'AL', 'AR', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'GA',
            'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME',
            'MI', 'MN', 'MO', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ',
            'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'RI', 'SC', 'SD',
            'TN', 'TX', 'UT', 'VA', 'VT', 'WA', 'WI', 'WV', 'WY',
        ]
    study_area_features = sorted(set(study_area_features))

    if study_area_property and study_area_features:
        logging.debug('  Filtering study area collection')
        logging.debug(f'  Property: {study_area_property}')
        logging.debug(f'  Features: {",".join(study_area_features)}')
        study_area_coll = study_area_coll.filter(
            ee.Filter.inList(study_area_property, study_area_features)
        )

    logging.debug('Building MGRS tile list')
    tiles_coll = ee.FeatureCollection(mgrs_coll_id).filterBounds(study_area_coll.geometry())

    # Filter collection by user defined lists
    if utm_zones:
        logging.debug(f'  Filter user UTM Zones:    {utm_zones}')
        tiles_coll = tiles_coll.filter(ee.Filter.inList(utm_property, utm_zones))
    if mgrs_skip_list:
        logging.debug(f'  Filter MGRS skip list:    {mgrs_skip_list}')
        tiles_coll = tiles_coll.filter(ee.Filter.inList(mgrs_property, mgrs_skip_list).Not())
    if mgrs_tiles:
        logging.debug(f'  Filter MGRS tiles/zones:  {mgrs_tiles}')
        # Allow MGRS tiles to be subsets of the full tile code
        #   i.e. mgrs_tiles = 10TE, 10TF
        mgrs_filters = [
            ee.Filter.stringStartsWith(mgrs_property, mgrs_id.upper())
            for mgrs_id in mgrs_tiles
        ]
        tiles_coll = tiles_coll.filter(ee.call('Filter.or', mgrs_filters))

    # Drop the MGRS tile geometry to simplify the getInfo call
    def drop_geometry(ftr):
        return ee.Feature(None).copyProperties(ftr)

    logging.debug('  Requesting tile/zone info')
    tiles_info = utils.get_info(tiles_coll.map(drop_geometry))

    # Constructed as a list of dicts to mimic other interpolation/export tools
    tiles_list = []
    for tile_ftr in tiles_info['features']:
        mgrs_id = tile_ftr['properties']['mgrs'].upper()
        tile_extent = [
            int(tile_ftr['properties']['xmin']),
            int(tile_ftr['properties']['ymin']),
            int(tile_ftr['properties']['xmax']),
            int(tile_ftr['properties']['ymax'])
        ]
        # Affine transform anchored on the upper-left corner (negative y cell size)
        tile_geo = [cell_size, 0, tile_extent[0], 0, -cell_size, tile_extent[3]]
        tile_shape = [
            int((tile_extent[2] - tile_extent[0]) / cell_size),
            int((tile_extent[3] - tile_extent[1]) / cell_size)
        ]
        tiles_list.append({
            'crs': f"EPSG:{int(tile_ftr['properties']['epsg'])}",
            'extent': tile_extent,
            'geo': tile_geo,
            'index': mgrs_id,
            # +1 so maxPixels strictly exceeds the expected pixel count
            'maxpixels': tile_shape[0] * tile_shape[1] + 1,
            'shape': tile_shape,
            'utm': int(mgrs_id[:2]),
            'wrs2_tiles': sorted(utils.wrs2_str_2_set(tile_ftr['properties'][wrs2_property])),
        })

    # Apply the user defined WRS2 tile list
    if wrs2_tiles:
        logging.debug(f'  Filter WRS2 tiles: {wrs2_tiles}')
        for tile in tiles_list:
            tile['wrs2_tiles'] = sorted(set(tile['wrs2_tiles']) & set(wrs2_tiles))

    # Only return export tiles that have intersecting WRS2 tiles
    export_list = [
        tile for tile in sorted(tiles_list, key=lambda k: k['index'])
        if tile['wrs2_tiles']
    ]

    return export_list
26 changes: 13 additions & 13 deletions openet/core/interpolate.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import datetime
from datetime import datetime, timedelta
import logging

from dateutil.relativedelta import relativedelta
Expand Down Expand Up @@ -444,20 +444,20 @@ def from_scene_et_fraction(

# Adjust start/end dates based on t_interval
# Increase the date range to fully include the time interval
start_dt = datetime.datetime.strptime(start_date, '%Y-%m-%d')
end_dt = datetime.datetime.strptime(end_date, '%Y-%m-%d')
start_dt = datetime.strptime(start_date, '%Y-%m-%d')
end_dt = datetime.strptime(end_date, '%Y-%m-%d')
if t_interval.lower() == 'monthly':
start_dt = datetime.datetime(start_dt.year, start_dt.month, 1)
start_dt = datetime(start_dt.year, start_dt.month, 1)
end_dt -= relativedelta(days=+1)
end_dt = datetime.datetime(end_dt.year, end_dt.month, 1)
end_dt = datetime(end_dt.year, end_dt.month, 1)
end_dt += relativedelta(months=+1)
start_date = start_dt.strftime('%Y-%m-%d')
end_date = end_dt.strftime('%Y-%m-%d')

# The start/end date for the interpolation include more days
# (+/- interp_days) than are included in the ETr collection
interp_start_dt = start_dt - datetime.timedelta(days=interp_days)
interp_end_dt = end_dt + datetime.timedelta(days=interp_days)
interp_start_dt = start_dt - timedelta(days=interp_days)
interp_end_dt = end_dt + timedelta(days=interp_days)
interp_start_date = interp_start_dt.date().isoformat()
interp_end_date = interp_end_dt.date().isoformat()

Expand Down Expand Up @@ -914,20 +914,20 @@ def from_scene_et_actual(

# Adjust start/end dates based on t_interval
# Increase the date range to fully include the time interval
start_dt = datetime.datetime.strptime(start_date, '%Y-%m-%d')
end_dt = datetime.datetime.strptime(end_date, '%Y-%m-%d')
start_dt = datetime.strptime(start_date, '%Y-%m-%d')
end_dt = datetime.strptime(end_date, '%Y-%m-%d')
if t_interval.lower() == 'monthly':
start_dt = datetime.datetime(start_dt.year, start_dt.month, 1)
start_dt = datetime(start_dt.year, start_dt.month, 1)
end_dt -= relativedelta(days=+1)
end_dt = datetime.datetime(end_dt.year, end_dt.month, 1)
end_dt = datetime(end_dt.year, end_dt.month, 1)
end_dt += relativedelta(months=+1)
start_date = start_dt.strftime('%Y-%m-%d')
end_date = end_dt.strftime('%Y-%m-%d')

# The start/end date for the interpolation include more days
# (+/- interp_days) than are included in the ETr collection
interp_start_dt = start_dt - datetime.timedelta(days=interp_days)
interp_end_dt = end_dt + datetime.timedelta(days=interp_days)
interp_start_dt = start_dt - timedelta(days=interp_days)
interp_end_dt = end_dt + timedelta(days=interp_days)
interp_start_date = interp_start_dt.date().isoformat()
interp_end_date = interp_end_dt.date().isoformat()

Expand Down
Loading