Initial commit
This commit is contained in:
23
Flexbrdf/hytools/io/__init__.py
Normal file
23
Flexbrdf/hytools/io/__init__.py
Normal file
@ -0,0 +1,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
HyTools: Hyperspectral image processing library
|
||||
Copyright (C) 2021 University of Wisconsin
|
||||
|
||||
Authors: Adam Chlus, Zhiwei Ye, Philip Townsend.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, version 3 of the License.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
The :mod:`hytools.io` module includes functions for reading
|
||||
from multiple file formats and writing to ENVI formatted binary files.
|
||||
"""
|
||||
from .envi import *
|
||||
697
Flexbrdf/hytools/io/envi.py
Normal file
697
Flexbrdf/hytools/io/envi.py
Normal file
@ -0,0 +1,697 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
HyTools: Hyperspectral image processing library
|
||||
Copyright (C) 2021 University of Wisconsin
|
||||
|
||||
Authors: Adam Chlus, Zhiwei Ye, Philip Townsend.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, version 3 of the License.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Functions for reading and writing ENVI formatted binary files
|
||||
|
||||
Todo:
|
||||
* Implement opening of ENVI files with different byte order
|
||||
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
from collections import Counter
|
||||
import numpy as np
|
||||
|
||||
# ENVI datatype conversion dictionary.
# Maps the integer ENVI header "data type" code to the equivalent numpy
# dtype. Complex types (ENVI codes 6 and 9) are not included.
dtype_dict = {1:np.uint8,
              2:np.int16,
              3:np.int32,
              4:np.float32,
              5:np.float64,
              12:np.uint16,
              13:np.uint32,
              14:np.int64,
              15:np.uint64}

# Dictionary of all ENVI header fields.
# Maps each recognized header field name to the parser type used by
# parse_envi_header: "str", "int", "float", "list_str", "list_float"
# or "list_int". NOTE: parse_envi_header adds unrecognized fields to
# this dictionary at runtime (as "str").
field_dict = {"acquisition time": "str",
              "band names":"list_str",
              "bands": "int",
              "bbl": "list_float",
              "byte order": "int",
              "class lookup": "str",
              "class names": "str",
              "classes": "int",
              "cloud cover": "float",
              "complex function": "str",
              "coordinate system string": "str",
              "correction factors": "list_float",
              "data gain values": "list_float",
              "data ignore value": "float",
              "data offset values": "list_float",
              "data reflectance gain values": "list_float",
              "data reflectance offset values": "list_float",
              "data type": "int",
              "default bands": "list_float",
              "default stretch": "str",
              "dem band": "int",
              "dem file": "str",
              "description": "str",
              "envi description":"str",
              "file type": "str",
              "fwhm": "list_float",
              "geo points": "list_float",
              "header offset": "int",
              "interleave": "str",
              "lines": "int",
              "map info": "list_str",
              "pixel size": "list_str",
              "projection info": "str",
              "read procedures": "str",
              "reflectance scale factor": "float",
              "rpc info": "str",
              "samples":"int",
              "security tag": "str",
              "sensor type": "str",
              "smoothing factors": "list_float",
              "solar irradiance": "float",
              "spectra names": "list_str",
              "sun azimuth": "float",
              "sun elevation": "float",
              "wavelength": "list_float",
              "wavelength units": "str",
              "x start": "float",
              "y start": "float",
              "z plot average": "str",
              "z plot range": "str",
              "z plot titles": "str"}
|
||||
|
||||
|
||||
def open_envi(hy_obj, anc_path=None, ext=False, glt_path=None):
    """Open an ENVI formatted image file and populate a HyTools object.

    Args:
        hy_obj (HyTools object): File object with ``file_name`` set; the
            header file is assumed to be located in the same directory.
        anc_path (dict, optional): Dictionary with pathnames and band numbers
            of ancillary datasets. Defaults to an empty dict.
        ext (bool, optional): Input ENVI file has a file extension.
            Currently unused; retained for interface compatibility.
        glt_path (dict, optional): Pathnames of an external GLT dataset.

    Returns:
        HyTools file object: Populated HyTools file object, or None if the
        header file is missing or the interleave type is unrecognized.
    """
    # Avoid a shared mutable default argument.
    if anc_path is None:
        anc_path = {}

    header_file = os.path.splitext(hy_obj.file_name)[0] + ".hdr"

    if not os.path.isfile(header_file):
        print("ERROR: Header file not found.")
        return None

    header_dict = parse_envi_header(header_file)
    hy_obj.lines = header_dict["lines"]
    hy_obj.columns = header_dict["samples"]
    hy_obj.bands = header_dict["bands"]
    hy_obj.bad_bands = np.zeros(hy_obj.bands, dtype=bool)
    hy_obj.interleave = header_dict["interleave"]
    hy_obj.fwhm = header_dict["fwhm"]
    hy_obj.wavelengths = header_dict["wavelength"]
    hy_obj.wavelength_units = header_dict["wavelength units"]
    hy_obj.dtype = dtype_dict[header_dict["data type"]]
    hy_obj.no_data = header_dict['data ignore value']
    hy_obj.map_info = header_dict['map info']
    hy_obj.byte_order = header_dict['byte order']
    hy_obj.anc_path = anc_path
    hy_obj.header_file = header_file
    hy_obj.transform = calc_geotransform(header_dict['map info'])

    if bool(header_dict['coordinate system string']):
        hy_obj.projection = header_dict['coordinate system string']
    else:
        hy_obj.projection = ''

    hy_obj.endianness = 'big' if hy_obj.byte_order == 1 else 'little'

    # A bad-band list (bbl) of 1/0 flags overrides the default all-good mask.
    if isinstance(header_dict['bbl'], np.ndarray):
        hy_obj.bad_bands = np.array([x == 1 for x in header_dict['bbl']])

    if header_dict["interleave"] == 'bip':
        hy_obj.shape = (hy_obj.lines, hy_obj.columns, hy_obj.bands)
    elif header_dict["interleave"] == 'bil':
        hy_obj.shape = (hy_obj.lines, hy_obj.bands, hy_obj.columns)
    elif header_dict["interleave"] == 'bsq':
        hy_obj.shape = (hy_obj.bands, hy_obj.lines, hy_obj.columns)
    else:
        # BUG FIX: the original set hy_obj = None here but kept executing,
        # which raised AttributeError on the next attribute access.
        print("ERROR: Unrecognized interleave type.")
        return None

    # If no_data value is not specified, guess using the image corners:
    # the most common corner value is assumed to be the no-data value.
    if hy_obj.no_data is None:
        hy_obj.load_data()
        band_ind = 5 if hy_obj.bands > 10 else 0
        if header_dict["interleave"] == 'bip':
            up_l = hy_obj.data[0, 0, band_ind]
            up_r = hy_obj.data[0, -1, band_ind]
            low_l = hy_obj.data[-1, 0, band_ind]
            low_r = hy_obj.data[-1, -1, band_ind]
        elif header_dict["interleave"] == 'bil':
            up_l = hy_obj.data[0, band_ind, 0]
            up_r = hy_obj.data[0, band_ind, -1]
            low_l = hy_obj.data[-1, band_ind, 0]
            low_r = hy_obj.data[-1, band_ind, -1]
        else:  # 'bsq' is the only remaining possibility
            up_l = hy_obj.data[band_ind, 0, 0]
            up_r = hy_obj.data[band_ind, 0, -1]
            low_l = hy_obj.data[band_ind, -1, 0]
            low_r = hy_obj.data[band_ind, -1, -1]

        # Raw on-disk values may need byte-swapping before comparison.
        if hy_obj.endianness != sys.byteorder:
            up_l = up_l.byteswap()
            up_r = up_r.byteswap()
            low_l = low_l.byteswap()
            low_r = low_r.byteswap()

        corner_counts = Counter([up_l, up_r, low_l, low_r])
        hy_obj.no_data = corner_counts.most_common(1)[0][0]
        hy_obj.close_data()

    # Optional external geolocation lookup table (GLT).
    if bool(glt_path):
        glt_meta_dict = parse_glt_envi(glt_path)
        hy_obj.glt_path = glt_meta_dict["glt_path"]
        hy_obj.glt_map_info = glt_meta_dict["map_info"]
        hy_obj.lines_glt = glt_meta_dict["lines_glt"]
        hy_obj.columns_glt = glt_meta_dict["columns_glt"]
        hy_obj.glt_transform = glt_meta_dict["transform"]
        hy_obj.glt_projection = glt_meta_dict["projection"]

    return hy_obj
|
||||
|
||||
|
||||
class WriteENVI:
    """Writer for ENVI formatted binary files backed by a numpy memory-map.

    The output array is created on disk at construction time and the matching
    ENVI header file is written alongside it. Lines, columns, bands, chunks
    or single pixels can then be written in place.
    """
    def __init__(self, output_name, header_dict):
        """
        Args:
            output_name (str): Pathname of output ENVI data file.
            header_dict (dict): Dictionary containing ENVI header information.

        Returns:
            None.
        """
        self.interleave = header_dict['interleave']
        self.header_dict = header_dict
        self.output_name = output_name
        # Map the ENVI integer data-type code to a numpy dtype.
        dtype = dtype_dict[header_dict["data type"]]
        lines = header_dict['lines']
        columns = header_dict['samples']
        bands = header_dict['bands']

        # On-disk array shape depends on the interleave:
        # bip = (lines, columns, bands), bil = (lines, bands, columns),
        # bsq = (bands, lines, columns).
        if self.interleave == "bip":
            self.data = np.memmap(output_name, dtype=dtype,
                                  mode='w+', shape=(lines, columns, bands))
        elif self.interleave == "bil":
            self.data = np.memmap(output_name, dtype=dtype,
                                  mode='w+', shape=(lines, bands, columns))
        elif self.interleave == "bsq":
            self.data = np.memmap(output_name, dtype=dtype,
                                  mode='w+', shape=(bands, lines, columns))
        write_envi_header(self.output_name, self.header_dict)

    def write_line(self, line, index):
        """Write one image line (row).

        Args:
            line (numpy.ndarray): Line array (columns, bands).
            index (int): Zero-based line index.

        Returns:
            None.
        """
        if self.interleave == "bip":
            self.data[index, :, :] = line

        elif self.interleave == "bil":
            # bil stores (bands, columns) per line, so swap the input axes.
            self.data[index, :, :] = np.moveaxis(line, 0, 1)

        elif self.interleave == "bsq":
            self.data[:, index, :] = np.moveaxis(line, 0, 1)

    def write_line_glt(self, arr, glt_indices_y, glt_indices_x):
        """Write pixel values at GLT-mapped output locations.

        Args:
            arr (numpy.ndarray): Pixel array, presumably (pixels, bands)
                — TODO confirm against callers.
            glt_indices_y (numpy.ndarray): Zero-based output line indices.
            glt_indices_x (numpy.ndarray): Zero-based output column indices.

        Returns:
            None.
        """
        if self.interleave == "bip":
            self.data[glt_indices_y, glt_indices_x, :] = arr

        elif self.interleave == "bil":
            # NOTE(review): fancy indexing on the two outer axes yields a
            # (pixels, bands) target, so no axis move should be needed here
            # — presumably why the moveaxis call was commented out; confirm.
            self.data[glt_indices_y, :, glt_indices_x] = arr  # np.moveaxis(line,0,1)

        elif self.interleave == "bsq":
            self.data[:, glt_indices_y, glt_indices_x] = np.moveaxis(arr, 0, 1)

    def write_column(self, column, index):
        """Write one image column.

        Args:
            column (numpy.ndarray): Column array (lines, bands).
            index (int): Zero-based column index.

        Returns:
            None.
        """
        if self.interleave == "bip":
            self.data[:, index, :] = column
        elif self.interleave == "bil":
            self.data[:, :, index] = column
        elif self.interleave == "bsq":
            self.data[:, :, index] = np.moveaxis(column, 0, 1)

    def write_band(self, band, index):
        """Write one spectral band.

        Args:
            band (numpy.ndarray): Band array (lines, columns).
            index (int): Zero-based band index.

        Returns:
            None.
        """
        if self.interleave == "bip":
            self.data[:, :, index] = band
        elif self.interleave == "bil":
            self.data[:, index, :] = band
        elif self.interleave == "bsq":
            self.data[index, :, :] = band

    def write_band_glt(self, band, index, glt_indices, fill_mask):
        """Write one band through a GLT, filling unmapped pixels.

        Args:
            band (numpy.ndarray): Band array (lines, columns).
            index (int): Zero-based band index.
            glt_indices (numpy.ndarray,numpy.ndarray): Zero-based tuple
                indices into the source band, one pair per True element
                of fill_mask.
            fill_mask (numpy.ndarray): Boolean mask of output pixels covered
                by the GLT; uncovered pixels get the data ignore value.

        Returns:
            None.
        """
        if self.interleave == "bip":
            self.data[:, :, index][fill_mask] = band[glt_indices]
            self.data[:, :, index][~fill_mask] = self.header_dict['data ignore value']
        elif self.interleave == "bil":
            self.data[:, index, :][fill_mask] = band[glt_indices]
            self.data[:, index, :][~fill_mask] = self.header_dict['data ignore value']
        elif self.interleave == "bsq":
            self.data[index, :, :][fill_mask] = band[glt_indices]
            self.data[index, :, :][~fill_mask] = self.header_dict['data ignore value']

    def write_chunk(self, chunk, line_index, column_index):
        """Write a rectangular chunk.

        Args:
            chunk (numpy.ndarray): Chunk array (chunk lines, chunk columns, bands).
            line_index (int): Zero-based upper line index.
            column_index (int): Zero-based left column index.

        Returns:
            None.
        """
        x_start = column_index
        x_end = column_index + chunk.shape[1]
        y_start = line_index
        y_end = line_index + chunk.shape[0]

        if self.interleave == "bip":
            self.data[y_start:y_end, x_start:x_end, :] = chunk
        elif self.interleave == "bil":
            self.data[y_start:y_end, :, x_start:x_end] = np.moveaxis(chunk, -1, -2)
        elif self.interleave == "bsq":
            self.data[:, y_start:y_end, x_start:x_end] = np.moveaxis(chunk, -1, 0)

    def write_pixel(self, pixel, line_index, column_index):
        """Write a single pixel spectrum.

        Args:
            pixel (numpy.ndarray): Pixel array (bands).
            line_index (int): Zero-based line index.
            column_index (int): Zero-based column index.

        Returns:
            None.
        """
        if self.interleave == "bip":
            self.data[line_index, column_index, :] = pixel
        elif self.interleave == "bil":
            self.data[line_index, :, column_index] = pixel
        elif self.interleave == "bsq":
            self.data[:, line_index, column_index] = pixel

    def close(self):
        """Delete the numpy memmap, flushing buffered writes to disk."""
        del self.data
|
||||
|
||||
def envi_header_from_neon(hy_obj, interleave='bsq'):
    """Create an ENVI header dictionary from NEON metadata.

    Args:
        hy_obj (HyTools object): Populated HyTools file object.
        interleave (str, optional): Data interleave type. Defaults to 'bsq'.

    Returns:
        dict: Populated ENVI header dictionary (insertion order matches the
        order fields are written to the header file).
    """
    # int16 data type (ENVI code 2), host byte order, no header offset.
    return {"ENVI description": "{}",
            "samples": hy_obj.columns,
            "lines": hy_obj.lines,
            "bands": hy_obj.bands,
            "header offset": 0,
            "file type": "ENVI Standard",
            "data type": 2,
            "interleave": interleave,
            "sensor type": "",
            "byte order": 0,
            "map info": hy_obj.map_info,
            "coordinate system string": hy_obj.projection,
            "wavelength units": hy_obj.wavelength_units,
            "data ignore value": hy_obj.no_data,
            "wavelength": hy_obj.wavelengths}
|
||||
|
||||
def envi_header_from_nc(hy_obj, interleave='bsq', warp_glt=False):
    """Create an ENVI header dictionary from NetCDF metadata.

    Args:
        hy_obj (HyTools object): Populated HyTools file object.
        interleave (str, optional): Data interleave type. Defaults to 'bsq'.
        warp_glt (bool, optional): Use GLT (warped) geometry instead of the
            raw image geometry. Defaults to False.

    Returns:
        dict: Populated ENVI header dictionary.
    """
    header_dict = {}
    header_dict["ENVI description"] = "{}"

    # Pick raw or GLT-warped geometry attributes.
    if warp_glt == False:
        samples, lines = hy_obj.columns, hy_obj.lines
        map_info = hy_obj.map_info
        projection = hy_obj.projection
        transform = hy_obj.transform
    else:
        samples, lines = hy_obj.columns_glt, hy_obj.lines_glt
        map_info = hy_obj.glt_map_info
        projection = hy_obj.glt_projection
        transform = hy_obj.glt_transform

    header_dict["samples"] = samples
    header_dict["lines"] = lines
    header_dict["map info"] = map_info
    # Coordinate system string is brace-wrapped; empty projection -> "{}".
    header_dict["coordinate system string"] = "{%s}" % projection if projection else "{}"
    header_dict["projection"] = projection
    header_dict["transform"] = transform

    # NOTE(review): band count is hard-coded to 2 (original had hy_obj.bands
    # commented out) — looks GLT-specific (x/y lookup bands); confirm.
    header_dict["bands"] = 2
    header_dict["header offset"] = 0
    header_dict["file type"] = "ENVI Standard"
    header_dict["data type"] = 4
    header_dict["interleave"] = interleave
    header_dict["sensor type"] = ""
    header_dict["byte order"] = 0

    header_dict["wavelength units"] = hy_obj.wavelength_units
    header_dict["data ignore value"] = hy_obj.no_data
    header_dict["wavelength"] = hy_obj.wavelengths
    return header_dict
|
||||
|
||||
|
||||
def write_envi_header(output_name, header_dict, mode='w'):
    """Write an ENVI header file to disk.

    Args:
        output_name (str): Output pathname; any extension is replaced
            with ``.hdr``.
        header_dict (dict): Populated ENVI header dictionary.
        mode (str): File open mode. Defaults to 'w'.

    Returns:
        None.
    """
    base_name = os.path.splitext(output_name)[0]
    # Context manager ensures the handle is closed even if a write fails
    # (the original left the file open on exception).
    with open(base_name + ".hdr", mode) as header_file:
        header_file.write("ENVI\n")

        for key in header_dict.keys():
            value = header_dict[key]
            # Convert lists/arrays to ENVI's brace-wrapped comma-separated form.
            if isinstance(value, (list, np.ndarray)):
                value = "{%s}" % ",".join(map(str, value))
            elif key == "coordinate system string" and value and isinstance(value, str):
                # Ensure the coordinate system string is wrapped in braces.
                if not value.startswith("{"):
                    value = "{%s}" % value
            else:
                value = str(value)
            # Skip entries with None as value.
            if value != 'None':
                header_file.write("%s = %s\n" % (key, value))
|
||||
|
||||
|
||||
|
||||
def envi_header_dict():
    """
    Returns:
        dict: ENVI header dictionary with every known field set to None.

    """
    # dict.fromkeys defaults values to None; the original iterated
    # field_dict.items() and discarded the values.
    return dict.fromkeys(field_dict)
|
||||
|
||||
|
||||
def envi_read_line(data, index, interleave):
    """Read a single image line (row) from an ENVI memory-map.

    Args:
        data (numpy.memmap): Numpy memory-map.
        index (int): Zero-based line index.
        interleave (str): Data interleave type ('bip', 'bil' or 'bsq').

    Returns:
        numpy.ndarray: Line array (columns, bands).
    """
    if interleave == "bip":
        line = data[index]
    elif interleave == "bil":
        # Per-line slice is (bands, columns); transpose to (columns, bands).
        line = data[index].T
    elif interleave == "bsq":
        line = data[:, index, :].T
    return line
|
||||
|
||||
def envi_read_column(data, index, interleave):
    """Read a single image column from an ENVI memory-map.

    Args:
        data (numpy.memmap): Numpy memory-map.
        index (int): Zero-based column index.
        interleave (str): Data interleave type ('bip', 'bil' or 'bsq').

    Returns:
        numpy.ndarray: Column array (lines, bands).
    """
    if interleave == "bip":
        column = data[:, index, :]
    elif interleave == "bil":
        column = data[..., index]
    elif interleave == "bsq":
        # Slice comes out (bands, lines); transpose to (lines, bands).
        column = data[:, :, index].T
    return column
|
||||
|
||||
def envi_read_band(data, index, interleave):
    """Read a single spectral band from an ENVI memory-map.

    Args:
        data (numpy.memmap): Numpy memory-map.
        index (int): Zero-based band index.
        interleave (str): Data interleave type ('bip', 'bil' or 'bsq').

    Returns:
        numpy.ndarray: Band array (lines, columns).
    """
    if interleave == "bip":
        band = data[..., index]
    elif interleave == "bil":
        band = data[:, index, :]
    elif interleave == "bsq":
        band = data[index]
    return band
|
||||
|
||||
def envi_read_pixels(data, lines, columns, interleave):
    """Read a set of individual pixels from an ENVI memory-map.

    Args:
        data (numpy.memmap): Numpy memory-map.
        lines (list): Zero-based line indices, paired element-wise
            with ``columns``.
        columns (list): Zero-based column indices.
        interleave (str): Data interleave type ('bip', 'bil' or 'bsq').

    Returns:
        numpy.ndarray: Pixel array (pixels, bands) for 'bip'/'bil';
        (bands, pixels) for 'bsq'.
    """
    if interleave == "bip":
        pixels = data[lines, columns]
    elif interleave == "bil":
        pixels = data[lines, :, columns]
    elif interleave == "bsq":
        pixels = data[:, lines, columns]
    return pixels
|
||||
|
||||
|
||||
def envi_read_chunk(data, col_start, col_end, line_start, line_end, interleave):
    """Read a rectangular chunk from an ENVI memory-map.

    Args:
        data (numpy.memmap): Numpy memory-map.
        col_start (int): Zero-based left column index.
        col_end (int): Non-inclusive zero-based right column index.
        line_start (int): Zero-based top line index.
        line_end (int): Non-inclusive zero-based bottom line index.
        interleave (str): Data interleave type ('bip', 'bil' or 'bsq').

    Returns:
        numpy.ndarray: Chunk array (line_end-line_start, col_end-col_start, bands).
    """
    if interleave == "bip":
        chunk = data[line_start:line_end, col_start:col_end, :]
    elif interleave == "bil":
        # Slice is (lines, bands, columns); swap the last two axes.
        chunk = np.swapaxes(data[line_start:line_end, :, col_start:col_end], -1, -2)
    elif interleave == "bsq":
        # Slice is (bands, lines, columns); move bands to the last axis.
        chunk = data[:, line_start:line_end, col_start:col_end].transpose(1, 2, 0)
    return chunk
|
||||
|
||||
def calc_geotransform(mapinfo):
    """Build a GDAL-style geotransform tuple from an ENVI 'map info' list.

    Args:
        mapinfo (list): Parsed ENVI 'map info' field; an optional trailing
            'rotation=<degrees>' entry rotates the pixel axes.

    Returns:
        tuple: (ul x, x-size terms, ul y, y-size terms) in GDAL order.
    """
    ulx = float(mapinfo[3])
    uly = float(mapinfo[4])

    last_entry = mapinfo[-1]
    if last_entry.startswith('rotation'):
        angle = np.radians(float(last_entry.split('=')[1]))
        size = float(mapinfo[5])
        cos_a, sin_a = np.cos(angle), np.sin(angle)
        # Rotation matrix scaled by pixel size, with the y axis flipped
        # (product with diag(1, -1)) so north-up maps get a negative y size.
        rot = size * np.array([[cos_a, -sin_a],
                               [sin_a,  cos_a]]) @ np.array([[1, 0], [0, -1]])
        return (ulx, rot[0, 0], rot[0, 1], uly, rot[1, 0], rot[1, 1])

    # No rotation: axis-aligned transform.
    return (ulx, float(mapinfo[5]), 0, uly, 0, -float(mapinfo[6]))
|
||||
|
||||
def parse_glt_envi(glt_path):
    """Collect metadata for an external GLT dataset.

    Args:
        glt_path (dict): Dictionary whose first entry's value is a list whose
            first element is the GLT data file pathname; the .hdr file is
            assumed to sit next to it.

    Returns:
        dict: GLT metadata (path dict, map info, dimensions, geotransform,
        projection).
    """
    first_key = list(glt_path.keys())[0]
    header_path = os.path.splitext(glt_path[first_key][0])[0] + ".hdr"
    header = parse_envi_header(header_path)

    meta = {"glt_path": glt_path,
            "map_info": header["map info"],
            "lines_glt": header["lines"],
            "columns_glt": header["samples"],
            "transform": calc_geotransform(header["map info"])}

    # Projection may be absent from the header; fall back to empty string.
    if "coordinate system string" in header:
        meta["projection"] = header["coordinate system string"]
    else:
        meta["projection"] = ''

    return meta
|
||||
|
||||
|
||||
def parse_envi_header(header_file):
    """Parse an ENVI header file into a dictionary.

    Args:
        header_file (str): Header file pathname.

    Returns:
        dict: Populated header dictionary; fields absent from the header
        are set to None.
    """
    header_dict = envi_header_dict()

    # Context manager closes the handle even if parsing raises
    # (the original left the file open on exception). The file object is
    # also no longer shadowing the header_file argument.
    with open(header_file, 'r') as hdr:
        line = hdr.readline()

        while line:
            if "=" in line:
                key, value = line.rstrip().split("=", 1)
                key = key.strip()
                # Register fields not in the ENVI default list.
                # NOTE: this intentionally mutates the module-level
                # field_dict, so an unknown field seen once is treated as a
                # string in all later calls.
                if key not in field_dict.keys():
                    field_dict[key] = "str"
                val_type = field_dict[key]

                # Brace-enclosed values may span multiple lines; read until
                # the closing brace.
                if "{" in value and not "}" in value:
                    while "}" not in line:
                        line = hdr.readline()
                        value += line

                # Convert the raw string according to the field's type;
                # translate() replaces newlines and braces with spaces.
                if '{}' in value:
                    value = None
                elif val_type == "list_float":
                    value = np.array([float(x) for x in value.translate(str.maketrans("\n{}","   ")).split(",")])
                elif val_type == "list_int":
                    value = np.array([int(x) for x in value.translate(str.maketrans("\n{}","   ")).split(",")])
                elif val_type == "list_str":
                    value = [x.strip() for x in value.translate(str.maketrans("\n{}","   ")).split(",")]
                elif val_type == "int":
                    value = int(value.translate(str.maketrans("\n{}","   ")))
                elif val_type == "float":
                    value = float(value.translate(str.maketrans("\n{}","   ")))
                elif val_type == "str":
                    value = value.translate(str.maketrans("\n{}","   ")).strip().lower()

                header_dict[key] = value
            line = hdr.readline()

    # Fill unused fields with None.
    for key in field_dict:
        if key not in header_dict.keys():
            header_dict[key] = None

    return header_dict
|
||||
71
Flexbrdf/hytools/io/neon.py
Normal file
71
Flexbrdf/hytools/io/neon.py
Normal file
@ -0,0 +1,71 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
HyTools: Hyperspectral image processing library
|
||||
Copyright (C) 2021 University of Wisconsin
|
||||
|
||||
Authors: Adam Chlus, Zhiwei Ye, Philip Townsend.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, version 3 of the License.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
NEON AOP HDF opener
|
||||
"""
|
||||
import h5py
|
||||
import numpy as np
|
||||
|
||||
|
||||
def open_neon(hy_obj, no_data=-9999):
    """Load and parse a NEON formatted HDF image into a HyTools file object.

    Args:
        hy_obj (HyTools object): File object with ``file_name`` set to the
            pathname of the input HDF file.
        no_data (float, optional): No data value. Defaults to -9999.

    Returns:
        HyTools file object: Populated HyTools file object.
    """
    hdf_obj = h5py.File(hy_obj.file_name, 'r')
    # NEON files contain a single top-level group (the site code).
    hy_obj.base_key = list(hdf_obj.keys())[0]
    metadata = hdf_obj[hy_obj.base_key]["Reflectance"]["Metadata"]
    data = hdf_obj[hy_obj.base_key]["Reflectance"]["Reflectance_Data"]

    hy_obj.projection = metadata['Coordinate_System']['Coordinate_System_String'][()].decode("utf-8")
    hy_obj.map_info = metadata['Coordinate_System']['Map_Info'][()].decode("utf-8").split(',')
    # Geotransform: (ul x, pixel width, 0, ul y, 0, -pixel height).
    hy_obj.transform = (float(hy_obj.map_info[3]), float(hy_obj.map_info[1]), 0,
                        float(hy_obj.map_info[4]), 0, -float(hy_obj.map_info[2]))
    hy_obj.fwhm = metadata['Spectral_Data']['FWHM'][()]
    hy_obj.wavelengths = metadata['Spectral_Data']['Wavelength'][()]
    hy_obj.wavelength_units = metadata['Spectral_Data']['Wavelength'].attrs['Units']
    hy_obj.lines = data.shape[0]
    hy_obj.columns = data.shape[1]
    hy_obj.bands = data.shape[2]
    hy_obj.bad_bands = np.zeros(hy_obj.bands, dtype=bool)
    hy_obj.no_data = no_data
    # HDF-internal paths of ancillary datasets, keyed by short name.
    hy_obj.anc_path = {'path_length': ['Ancillary_Imagery','Path_Length'],
                       'sensor_az': ['to-sensor_Azimuth_Angle'],
                       'sensor_zn': ['to-sensor_Zenith_Angle'],
                       'solar_az': ['Logs','Solar_Azimuth_Angle'],
                       'solar_zn': ['Logs','Solar_Zenith_Angle'],
                       'slope': ['Ancillary_Imagery','Slope'],
                       'aspect': ['Ancillary_Imagery','Aspect'],
                       'aod': ['Ancillary_Imagery','Aerosol_Optical_Depth'],
                       'sky_view': ['Ancillary_Imagery','Sky_View_Factor'],
                       'illum_factor': ['Ancillary_Imagery','Illumination_Factor'],
                       # BUG FIX: key was 'elevation;' (stray semicolon),
                       # which made the elevation dataset unreachable by
                       # its intended name.
                       'elevation': ['Ancillary_Imagery','Smooth_Surface_Elevation'],
                       'cast_shadow': ['Ancillary_Imagery','Cast_Shadow'],
                       'dense_veg': ['Ancillary_Imagery','Dark_Dense_Vegetation_Classification'],
                       'visibility_index': ['Ancillary_Imagery','Visibility_Index_Map'],
                       'haze_water_cloud': ['Ancillary_Imagery','Haze_Water_Cloud_Map'],
                       'water_vapor': ['Ancillary_Imagery','Water_Vapor_Column']}

    return hy_obj
|
||||
426
Flexbrdf/hytools/io/netcdf.py
Normal file
426
Flexbrdf/hytools/io/netcdf.py
Normal file
@ -0,0 +1,426 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
HyTools: Hyperspectral image processing library
|
||||
Copyright (C) 2021 University of Wisconsin
|
||||
|
||||
Authors: Adam Chlus, Zhiwei Ye, Philip Townsend.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, version 3 of the License.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
NASA NetCDF opener
|
||||
"""
|
||||
import os
|
||||
import h5py
|
||||
import h5netcdf
|
||||
import numpy as np
|
||||
from .envi import parse_envi_header, WriteENVI, parse_glt_envi
|
||||
|
||||
unit_dict = {'nm':'nanometers'}
|
||||
utm_zone_dict = {'N':'North','S':'South'}
|
||||
|
||||
def open_netcdf(hy_obj, sensor, anc_path=None, glt_path=None):
    """Load and parse a NASA-formatted NetCDF AVIRIS/EMIT image into a HyTools file object.

    Args:
        hy_obj: HyTools file object to populate; ``hy_obj.file_name`` must be
            the pathname of the NetCDF file.
        sensor (str): Sensor name used for reading, either 'AV' (AVIRIS NetCDF)
            or 'EMIT'.
        anc_path (dict, optional): Pathnames and band numbers of ancillary datasets.
        glt_path (dict, optional): Pathnames and band numbers of external GLT datasets.

    Returns:
        HyTools file object: Populated HyTools file object.
    """
    # Avoid mutable default arguments; None means "not provided".
    anc_path = {} if anc_path is None else anc_path
    glt_path = {} if glt_path is None else glt_path

    nc4_obj = h5py.File(hy_obj.file_name, 'r')

    # Radiance products take precedence; anything else is assumed reflectance.
    if "radiance" in list(nc4_obj.keys()):
        data_var_name = "radiance"
    else:
        data_var_name = "reflectance"
    hy_obj.base_key = data_var_name

    # Locate the built-in GLT group, if any (naming differs by product).
    if "geolocation_lookup_table" in list(nc4_obj.keys()):
        glt_var_name = "geolocation_lookup_table"
    elif "location" in list(nc4_obj.keys()):
        glt_var_name = "location"
    else:
        glt_var_name = None

    metadata = nc4_obj.attrs
    if sensor == 'AV':
        # AVIRIS NetCDF stores the cube and band metadata inside a group
        # named after the product ('radiance' or 'reflectance').
        data = nc4_obj[data_var_name][data_var_name]
        hy_obj.fwhm = nc4_obj[data_var_name]['fwhm'][()]
        hy_obj.wavelengths = nc4_obj[data_var_name]['wavelength'][()]

        # The units attribute may be spelled 'units' (abbreviated, e.g. 'nm')
        # or 'unit' (already spelled out).
        if 'units' in nc4_obj[data_var_name]['wavelength'].attrs.keys():
            hy_obj.wavelength_units = unit_dict[get_attr_string(nc4_obj[data_var_name]['wavelength'].attrs['units'])]
        elif 'unit' in nc4_obj[data_var_name]['wavelength'].attrs.keys():
            hy_obj.wavelength_units = get_attr_string(nc4_obj[data_var_name]['wavelength'].attrs['unit'])

        # AVIRIS cube is band-interleaved first: (bands, lines, columns).
        hy_obj.lines = data.shape[1]
        hy_obj.columns = data.shape[2]
        hy_obj.bands = data.shape[0]

    elif sensor == 'EMIT':
        # EMIT stores the cube at the file root: (lines, columns, bands).
        data = nc4_obj[data_var_name]
        hy_obj.fwhm = nc4_obj['sensor_band_parameters']['fwhm'][()]
        hy_obj.wavelengths = nc4_obj['sensor_band_parameters']['wavelengths'][()]
        hy_obj.wavelength_units = unit_dict[get_attr_string(nc4_obj['sensor_band_parameters']['wavelengths'].attrs['units'])]
        hy_obj.lines = data.shape[0]
        hy_obj.columns = data.shape[1]
        hy_obj.bands = data.shape[2]

        # good_wavelengths==1 marks usable bands; invert to get bad-band mask.
        # NOTE: np.bool was removed in NumPy 1.24; use the builtin bool.
        hy_obj.bad_bands = np.array(1 - nc4_obj['sensor_band_parameters']['good_wavelengths'][()]).astype(bool)

    # _FillValue may be stored as a scalar or a length-1 array.
    if isinstance(data.attrs['_FillValue'], np.ndarray):
        hy_obj.no_data = data.attrs['_FillValue'][0]
    else:
        hy_obj.no_data = data.attrs['_FillValue']
    hy_obj.anc_path = anc_path

    if bool(glt_path):
        # External GLT supplied: read its geometry from the ENVI header.
        glt_meta_dict = parse_glt_envi(glt_path)

        hy_obj.glt_path = glt_meta_dict["glt_path"]
        hy_obj.glt_map_info = glt_meta_dict["map_info"]
        hy_obj.lines_glt = glt_meta_dict["lines_glt"]
        hy_obj.columns_glt = glt_meta_dict["columns_glt"]
        hy_obj.glt_transform = glt_meta_dict["transform"]
        hy_obj.glt_projection = glt_meta_dict["projection"]
        del glt_meta_dict

        if sensor == "EMIT":
            # EMIT can only have one set of geotransform / GLT; the external
            # one overrides the built-in GLT.
            hy_obj.projection = hy_obj.glt_projection
            hy_obj.map_info = hy_obj.glt_map_info
            hy_obj.transform = hy_obj.glt_transform

    else:
        if sensor == 'EMIT':
            # Use the built-in GLT stored in the 'location' group.
            hy_obj.glt_path = {"glt_x": ["location", "glt_x"],
                               "glt_y": ["location", "glt_y"]}
            hy_obj.projection = get_attr_string(metadata['spatial_ref'])
            geotransform = nc4_obj.attrs['geotransform'][()]
            # EMIT built-in geometry is geographic lat/lon on WGS-84.
            hy_obj.map_info = ['Geographic Lat/Lon', '1', '1',
                               str(geotransform[0]), str(geotransform[3]),
                               str(geotransform[1]), str(-geotransform[5]),
                               'WGS-84']
            hy_obj.transform = tuple(metadata['geotransform'][()])
            glt_x = nc4_obj['location']['glt_x']

            hy_obj.lines_glt = glt_x.shape[0]
            hy_obj.columns_glt = glt_x.shape[1]

            hy_obj.glt_projection = hy_obj.projection
            hy_obj.glt_transform = hy_obj.transform
            hy_obj.glt_map_info = hy_obj.map_info

        elif sensor == 'AV':
            # The projection variable name differs between product versions.
            if "transverse_mercator" in nc4_obj.keys():
                spatial_ref_name_tag = "transverse_mercator"
            elif "projection" in nc4_obj.keys():
                spatial_ref_name_tag = "projection"
            else:
                # NOTE(review): if neither variable exists this lookup below
                # raises; presumably all AVIRIS NetCDF products carry one.
                spatial_ref_name_tag = None

            hy_obj.projection = get_attr_string(nc4_obj[spatial_ref_name_tag].attrs['spatial_ref'])
            geotransform = [float(x) for x in get_attr_string(nc4_obj[spatial_ref_name_tag].attrs['GeoTransform']).split(' ')]

            # Extract e.g. '11N' from '...UTM zone 11N",GEOGCS...' in the WKT.
            utm_zone_tag = ((hy_obj.projection).split('UTM zone ')[1]).split('",GEOGCS')[0]
            hy_obj.map_info = ['UTM', '1', '1',
                               str(geotransform[0]), str(geotransform[3]),
                               str(geotransform[1]), str(-geotransform[5]),
                               utm_zone_tag[:-1], utm_zone_dict[utm_zone_tag[-1]], 'WGS-84']
            hy_obj.transform = tuple(geotransform)

            hy_obj.glt_path = {"glt_x": [glt_var_name, "sample"],
                               "glt_y": [glt_var_name, "line"]}

            if glt_var_name is None:
                # No GLT present: the grid matches the image itself.
                hy_obj.lines_glt = hy_obj.lines
                hy_obj.columns_glt = hy_obj.columns
            else:
                glt_x = nc4_obj[glt_var_name]['sample']
                hy_obj.lines_glt = glt_x.shape[0]
                hy_obj.columns_glt = glt_x.shape[1]

            if hy_obj.base_key == "radiance":
                hy_obj.glt_projection = hy_obj.projection
                hy_obj.glt_transform = hy_obj.transform
                hy_obj.glt_map_info = hy_obj.map_info

    return hy_obj
|
||||
|
||||
def get_attr_string(attr):
    """Return *attr* as text, decoding UTF-8 when the attribute is bytes.

    HDF5/NetCDF string attributes may surface as ``bytes`` or ``str``
    depending on how they were written; non-bytes values pass through
    unchanged.
    """
    return attr.decode("utf-8") if isinstance(attr, bytes) else attr
|
||||
|
||||
def set_wavelength_meta(nc4_obj, header_dict, glt_bool):
    """Create the wavelength and FWHM variables in an output NetCDF file.

    Args:
        nc4_obj: Open h5netcdf file object (write mode).
        header_dict (dict): ENVI-style header dictionary with 'file_type',
            'bands', 'wavelength' and 'fwhm' entries.
        glt_bool (bool): True when the output is orthorectified with a GLT.

    Returns:
        None.
    """
    file_type = (header_dict['file_type']).lower()

    # Orthorectified outputs (ENVI/AVIRIS-style, or EMIT warped with a GLT)
    # keep band metadata under the /reflectance group.
    # Fix: test truthiness of glt_bool (the original compared `glt_bool is
    # True`, which silently skipped this branch for truthy non-bool flags
    # while the unwarped-EMIT branch used plain truthiness).
    if file_type in ["envi", "ncav"] or (file_type == "emit" and glt_bool):
        nc4_obj.create_group("reflectance")
        nc4_obj.create_variable("/reflectance/wavelength", ("wavelength",),
                                data=header_dict['wavelength'],
                                dtype=np.float32)
        nc4_obj.create_variable("/reflectance/fwhm", ("wavelength",),
                                data=header_dict['fwhm'],
                                dtype=np.float32)
    elif file_type == "emit":
        # Unwarped EMIT output: band metadata lives under
        # /sensor_band_parameters, indexed by a 'bands' dimension.
        nc4_obj.dimensions["bands"] = header_dict['bands']
        nc4_obj.create_variable("/sensor_band_parameters/wavelengths", ("bands",),
                                data=np.array(header_dict['wavelength']),
                                dtype=np.float32)
        nc4_obj.create_variable("/sensor_band_parameters/fwhm", ("bands",),
                                data=header_dict['fwhm'],
                                dtype=np.float32)
|
||||
|
||||
|
||||
def write_netcdf_refl_meta(nc4_obj, header_dict, glt_bool):
    """Write all reflectance-file metadata to an output NetCDF file.

    Runs the two metadata writers in order: band metadata
    (wavelength/FWHM variables) first, then the spatial/projection
    metadata.
    """
    for meta_writer in (set_wavelength_meta, write_netcdf_meta):
        meta_writer(nc4_obj, header_dict, glt_bool)
|
||||
|
||||
class WriteNetCDF(WriteENVI):
    """Iterator class for writing to a NetCDF data file.

    The class inherits all the write functions from WriteENVI: write pixel,
    line, band, chunk, etc. Writing is delegated to ``self.data`` (an
    h5netcdf variable) using the interleave recorded in ``self.interleave``.
    """
    def __init__(self,output_name, header_dict, attr_dict, glt_bool, type_tag, band_name=None):
        """
        Args:
            output_name (str): Pathname of the output NetCDF data file.
            header_dict (dict): Dictionary containing ENVI-style header
                information ('lines', 'samples', 'file_type', ...).
            attr_dict (dict or None): Extra NetCDF attributes to attach;
                keys may use '/group/path/name' form (see external_nc_attrs).
            glt_bool (bool): True when the output is orthorectified with a GLT.
            type_tag (str): Dataset kind to create: 'reflectance', 'mask'
                or 'trait'.
            band_name (str, optional): Group/variable name used by the
                'mask' and 'trait' dataset kinds.

        Returns:
            None.
        """
        # HDF5 chunk sizes: largest power of two <= the dimension, capped at 256.
        dim1_chunk_size = 2**(min(int(np.log2(header_dict['lines'])),8))
        dim2_chunk_size = 2**(min(int(np.log2(header_dict['samples'])),8))

        if type_tag=="reflectance": # for reflectance
            self.header_dict = header_dict
            self.output_name = output_name
            self.file_type = header_dict['file_type'].lower()

            # Reflectance starts a new file and writes full metadata first.
            self.nc4_obj = h5netcdf.File(output_name, "w")

            write_netcdf_refl_meta(self.nc4_obj,header_dict,glt_bool)
            if self.file_type in ["ncav","envi"]:
                # Map-projected cube: band-sequential (wavelength, y, x).
                self.interleave = "bsq"
                self.data = self.nc4_obj.create_variable("/reflectance/reflectance",
                                                         ("wavelength","northing","easting"),
                                                         np.float32,
                                                         chunks=(2,dim1_chunk_size,dim2_chunk_size),
                                                         compression='gzip')
                self.data.attrs["grid_mapping"] = "projection"
            elif self.file_type == "emit":
                if glt_bool:
                    # EMIT warped through a GLT: same layout as above.
                    self.interleave = "bsq"
                    self.data = self.nc4_obj.create_variable("/reflectance/reflectance",
                                                             ("wavelength","northing","easting"),
                                                             np.float32,
                                                             chunks=(1,dim1_chunk_size,dim2_chunk_size),
                                                             compression='gzip')
                    self.data.attrs["grid_mapping"] = "projection"
                else:
                    # Unwarped EMIT: band-interleaved-by-pixel at the file root.
                    self.interleave = "bip"
                    self.data = self.nc4_obj.create_variable("reflectance",
                                                             ("downtrack","crosstrack","bands"),
                                                             np.float32,
                                                             chunks=(dim1_chunk_size,dim2_chunk_size,2),
                                                             compression='gzip')

            self.data.attrs["_FillValue"]=np.array([-9999.0],dtype=np.float32)
            self.external_nc_attrs(attr_dict)
        elif type_tag=="mask": # for masks
            self.interleave = "bsq"
            self.header_dict = header_dict
            self.file_type = header_dict['file_type'].lower()
            # Masks are appended to an existing file, so open read/write.
            self.nc4_obj = h5netcdf.File(output_name, "r+")

            if self.file_type in ["ncav","envi"]:
                self.data = self.nc4_obj.create_variable(f"/masks/{band_name}",
                                                         ("northing","easting"),
                                                         np.uint8,
                                                         chunks=(dim1_chunk_size,dim2_chunk_size),
                                                         compression='gzip')
                self.data.attrs["grid_mapping"] = "projection"
            elif self.file_type == "emit":
                if glt_bool:
                    self.data = self.nc4_obj.create_variable(f"/masks/{band_name}",
                                                             ("northing","easting"),
                                                             np.uint8,
                                                             chunks=(dim1_chunk_size,dim2_chunk_size),
                                                             compression='gzip')
                    self.data.attrs["grid_mapping"] = "projection"
                else:
                    self.data = self.nc4_obj.create_variable(f"/masks/{band_name}",
                                                             ("downtrack","crosstrack"),
                                                             np.uint8,
                                                             chunks=(dim1_chunk_size,dim2_chunk_size),
                                                             compression='gzip')
            # Masks are uint8; 255 marks no-data pixels.
            self.data.attrs["_FillValue"]=np.array([255],dtype=np.uint8)
            self.external_nc_attrs(attr_dict)
        elif type_tag=="trait":
            self.interleave = "bsq"
            self.file_type = header_dict['file_type'].lower()
            # Traits start their own file.
            self.nc4_obj = h5netcdf.File(output_name, "w")

            # Trait stacks always carry two bands — presumably an estimate
            # plus its uncertainty; TODO confirm against the trait writer.
            self.nc4_obj.dimensions["bands"]=2

            self.interleave = "bsq"

            write_netcdf_meta(self.nc4_obj,header_dict,glt_bool)
            if self.file_type in ["ncav","envi"]:
                self.data = self.nc4_obj.create_variable(f"/{band_name}/stack",
                                                         ("bands","northing","easting"),
                                                         np.float32,
                                                         chunks=(1,dim1_chunk_size,dim2_chunk_size),
                                                         compression='gzip')
                self.data.attrs["grid_mapping"] = "projection"

            elif self.file_type == "emit":
                if glt_bool:
                    self.data = self.nc4_obj.create_variable(f"/{band_name}/stack",
                                                             ("bands","northing","easting"),
                                                             np.float32,
                                                             chunks=(1,dim1_chunk_size,dim2_chunk_size),
                                                             compression='gzip')
                    self.data.attrs["grid_mapping"] = "projection"
                else:
                    self.data = self.nc4_obj.create_variable(f"/{band_name}/stack",
                                                             ("bands","downtrack","crosstrack"),
                                                             np.float32,
                                                             chunks=(1,dim1_chunk_size,dim2_chunk_size),
                                                             compression='gzip')

            self.data.attrs["band_names"] = header_dict["band names"][:2]
            self.data.attrs["_FillValue"] = np.array([-9999.0],dtype=np.float32)

    def write_mask_band(self,band):
        """Write a full 2D mask band to the dataset."""
        self.data[:,:] = band

    def write_mask_band_glt(self,band,glt_indices,fill_mask):
        """Orthorectify a mask band through the GLT and write it.

        Args:
            band (numpy.ndarray): Unwarped mask band (lines, columns).
            glt_indices: Zero-based index arrays into `band`, one entry per
                True pixel of `fill_mask`.
            fill_mask (numpy.ndarray): Boolean orthorectified-grid mask;
                True where a source pixel exists.

        Returns:
            None.
        """
        tmp_band = np.ones(fill_mask.shape)*self.header_dict['data ignore value']
        tmp_band[fill_mask] = band[glt_indices]
        # Pixels with no source mapping get the uint8 fill value (255).
        tmp_band[~fill_mask] = 255

        self.data[:,:] = tmp_band


    def write_glt_dataset(self,glt_x_arr,glt_y_arr,dim_x_name="ortho_x",dim_y_name="ortho_y"):
        """Write GLT sample (x) and line (y) index arrays to /location.

        Args:
            glt_x_arr (numpy.ndarray): GLT sample-index array (y, x).
            glt_y_arr (numpy.ndarray): GLT line-index array (y, x).
            dim_x_name (str): Name of the orthorectified x dimension.
            dim_y_name (str): Name of the orthorectified y dimension.

        Returns:
            None.
        """
        var_glt_x = self.nc4_obj.create_variable("/location/glt_x",(dim_y_name,dim_x_name),
                                                 data=glt_x_arr,
                                                 dtype=np.int32,
                                                 chunks=(256,256),
                                                 compression='gzip')
        var_glt_y = self.nc4_obj.create_variable("/location/glt_y",(dim_y_name,dim_x_name),
                                                 data=glt_y_arr,
                                                 dtype=np.int32,
                                                 chunks=(256,256),
                                                 compression='gzip')

        var_glt_x.attrs["grid_mapping"] = "projection"
        var_glt_y.attrs["grid_mapping"] = "projection"

        # 0 marks GLT cells with no source-image mapping.
        var_glt_x.attrs["_FillValue"]=np.array([0],dtype=np.int32)
        var_glt_y.attrs["_FillValue"]=np.array([0],dtype=np.int32)


    def write_netcdf_band_glt(self,band,index,glt_indices,fill_mask):
        """Orthorectify one band through the GLT and write it at `index`.

        Args:
            band (numpy.ndarray): Band array (lines,columns).
            index (int): Zero-based band index.
            glt_indices (numpy.ndarray,numpy.ndarray): Zero-based tuple indices.
            fill_mask (numpy.ndarray): Boolean orthorectified-grid mask;
                True where a source pixel exists.

        Returns:
            None.

        """
        # Unmapped pixels receive the -9999 fill value.
        tmp_band = np.ones(fill_mask.shape)*(-9999)
        tmp_band[fill_mask] = band[glt_indices]
        tmp_band[~fill_mask] = -9999

        # Place the band according to the dataset interleave.
        if self.interleave == "bip":
            self.data[:,:,index]=tmp_band
        elif self.interleave == "bil":
            self.data[:,index,:]=tmp_band
        elif self.interleave == "bsq":
            self.data[index,:,:]=tmp_band

    def external_nc_attrs(self,attr_dict):
        """Attach user-supplied attributes to the file or nested groups.

        Keys containing '/' are treated as '/group/path/attr_name' and set
        on that group; plain keys become global (file-level) attributes.
        All values are stringified and UTF-8 encoded.
        """
        if attr_dict is None:
            return

        for key in attr_dict:
            split_key = key.split('/')
            # Drop the empty segment produced by a leading '/'.
            if len(split_key[0])==0:
                split_key.pop(0)
            if len(split_key)>1:
                group_path = '/'+'/'.join(split_key[:-1])
                self.nc4_obj[group_path].attrs[split_key[-1]]=str(attr_dict[key]).encode("utf-8")
            else:
                self.nc4_obj.attrs[key]=str(attr_dict[key]).encode("utf-8")


    def close(self):
        """Close the underlying NetCDF file handle."""
        self.nc4_obj.close()
|
||||
|
||||
def write_netcdf_meta(nc4_obj,header_dict,glt_bool):
|
||||
|
||||
file_type = (header_dict['file_type']).lower()
|
||||
|
||||
if file_type in ["envi","ncav"] or (file_type=="emit" and glt_bool is True):
|
||||
|
||||
transform=header_dict['transform']
|
||||
|
||||
nc4_obj.dimensions["northing"]=header_dict['lines'] #dim0
|
||||
nc4_obj.dimensions["easting"]=header_dict['samples'] #dim1
|
||||
|
||||
tm_var = nc4_obj.create_variable("/projection",data=np.array([0]),dtype=np.uint8)
|
||||
tm_var.attrs["GeoTransform"]=' '.join([str(x) for x in header_dict['transform']]).encode("utf-8")
|
||||
tm_var.attrs["crs_wkt"]=header_dict['projection'].encode("utf-8")
|
||||
tm_var.attrs["spatial_ref"]=header_dict['projection'].encode("utf-8")
|
||||
|
||||
elif file_type=="emit":
|
||||
if glt_bool: # handled in above codes
|
||||
pass
|
||||
else: # do not warp with GLT
|
||||
loc_gp=nc4_obj.create_group("location")
|
||||
nc4_obj.dimensions["downtrack"]=header_dict['lines'] #dim0
|
||||
nc4_obj.dimensions["crosstrack"]=header_dict['samples'] #dim1
|
||||
|
||||
nc4_obj.dimensions["ortho_y"]=header_dict['lines_glt']
|
||||
nc4_obj.dimensions["ortho_x"]=header_dict['samples_glt']
|
||||
|
||||
nc4_obj.attrs["geotransform"]=' '.join([str(x) for x in header_dict['transform']]).encode("utf-8")
|
||||
nc4_obj.attrs["spatial_ref"]=header_dict['projection'].encode("utf-8")
|
||||
nc4_obj.attrs["spatialResolution"]=np.sqrt(header_dict['transform'][1]**2+header_dict['transform'][2]**2)
|
||||
|
||||
tm_var = nc4_obj.create_variable("/projection",data=np.array([0]),dtype=np.uint8)
|
||||
tm_var.attrs["GeoTransform"]=' '.join([str(x) for x in header_dict['transform']]).encode("utf-8")
|
||||
tm_var.attrs["crs_wkt"]=header_dict['projection'].encode("utf-8")
|
||||
tm_var.attrs["spatial_ref"]=header_dict['projection'].encode("utf-8")
|
||||
Reference in New Issue
Block a user