1417 lines
55 KiB
Python
1417 lines
55 KiB
Python
"""
|
||
无人机高光谱线阵相机角度计算程序
|
||
|
||
计算每个像素的太阳天顶角、太阳方位角、传感器天顶角、传感器方位角、相对方位角
|
||
以南向北为正向
|
||
"""
|
||
|
||
import os
|
||
import sys
|
||
import numpy as np
|
||
import pandas as pd
|
||
from datetime import datetime, timedelta
|
||
import time
|
||
import re
|
||
from typing import Dict, List, Tuple, Optional
|
||
from pyproj import Transformer
|
||
import pvlib.solarposition as solarposition
|
||
from pathlib import Path
|
||
import spectral
|
||
import warnings
|
||
|
||
warnings.filterwarnings('ignore')
|
||
|
||
# Optional dependency: tqdm provides progress bars. HAS_TQDM records whether
# it is importable so downstream code can degrade gracefully without it.
try:
    from tqdm import tqdm
    HAS_TQDM = True
except ImportError:
    HAS_TQDM = False
    print("警告: 未安装tqdm库,无法显示进度条。建议安装: pip install tqdm")
|
||
|
||
|
||
class UAVAngleCalculator:
|
||
"""无人机高光谱线阵相机角度计算器"""
|
||
|
||
def __init__(self, config: Dict):
    """Initialize the calculator from a configuration dictionary.

    Args:
        config: options — 'data_dir', 'times_root_dir' (or legacy
            'times_dir'), 'base_height', 'output_dir'.
    """
    self.config = config

    # Input data directory.
    self.data_dir = Path(config.get('data_dir', './Data'))
    # Root of the .times files: contains one sub-directory per swath name,
    # or (legacy layout) *.times files directly inside it.
    self.times_root_dir = Path(
        config.get('times_root_dir',
                   config.get('times_dir', config.get('data_dir', './Data'))))
    self.base_height = config.get('base_height', 254)  # reference ground elevation
    self.output_dir = Path(config.get('output_dir', './Output'))

    # Make sure the output directory exists.
    self.output_dir.mkdir(exist_ok=True)

    # Coordinate transformer: WGS-84 lon/lat -> UTM Zone 51N.
    self.utm_crs = "EPSG:32651"
    self.wgs84_crs = "EPSG:4326"
    self.transformer = Transformer.from_crs(self.wgs84_crs, self.utm_crs, always_xy=True)

    # Per-run caches, filled lazily while processing.
    self.gps_data = None
    self.times_data = {}
    self.image_info = None
    self.image_data = None
|
||
|
||
def read_bip_image(self, bip_file: Path) -> Tuple[np.ndarray, Dict]:
    """Read an ENVI BIP image via the spectral library.

    Falls back to the manual header/memmap reader when spectral fails.

    Args:
        bip_file: path to the .bip file (its .hdr sidecar is read too).

    Returns:
        Tuple of (image array, image-info dict).
    """
    print(f"读取BIP图像文件: {bip_file}")
    print("使用spectral库读取图像数据...")
    start_time = time.time()

    try:
        # spectral locates and parses the .hdr sidecar automatically.
        envi_img = spectral.open_image(str(bip_file))
        pixels = envi_img.load()
        meta = self._extract_spectral_info(envi_img)

        elapsed = time.time() - start_time
        print(f"图像数据读取完成,耗时: {elapsed:.2f} 秒")
        print(f"图像形状: {pixels.shape}")
        return pixels, meta
    except Exception as e:
        print(f"使用spectral读取失败: {e}")
        print("回退到手动解析方法...")
        # Fallback: hand-parse the ENVI header and memory-map the cube.
        return self._read_bip_manual(bip_file)
|
||
|
||
def _extract_band_names_from_hdr(self, hdr_file: Path, band_count: int) -> List[str]:
|
||
"""
|
||
从HDR文件中提取band names,若不存在则生成默认名称
|
||
"""
|
||
if not hdr_file.exists():
|
||
return [f'Band {i + 1}' for i in range(band_count)]
|
||
|
||
try:
|
||
with open(hdr_file, 'r', encoding='utf-8', errors='ignore') as f:
|
||
lines = f.read().splitlines()
|
||
except Exception:
|
||
return [f'Band {i + 1}' for i in range(band_count)]
|
||
|
||
names = []
|
||
in_block = False
|
||
for line in lines:
|
||
line_stripped = line.strip()
|
||
if not in_block:
|
||
if line_stripped.lower().startswith('band names'):
|
||
in_block = True
|
||
if '{' in line_stripped:
|
||
line_stripped = line_stripped.split('{', 1)[1].strip()
|
||
else:
|
||
line_stripped = ''
|
||
if in_block:
|
||
if '}' in line_stripped:
|
||
line_stripped = line_stripped.split('}', 1)[0].strip()
|
||
in_block = False
|
||
if line_stripped:
|
||
parts = [p.strip().strip("'\"") for p in line_stripped.split(',') if p.strip()]
|
||
names.extend(parts)
|
||
|
||
if not in_block and names:
|
||
break
|
||
|
||
if len(names) < band_count:
|
||
names = [f'Band {i + 1}' for i in range(band_count)]
|
||
else:
|
||
names = names[:band_count]
|
||
|
||
return names
|
||
|
||
def _extract_spectral_info(self, spectral_img) -> Dict:
    """Build an image-info dict from a spectral (ENVI) image object.

    Collects dimensions, dtype and interleave, and — when 'map info'
    metadata is present — the UTM georeferencing, from which a GDAL-style
    affine transform [x_ul, pixel_x, 0, y_ul, 0, -pixel_y] is derived.

    Args:
        spectral_img: image object returned by spectral.open_image().

    Returns:
        Image-info dictionary.
    """
    info = {}

    # Basic dimensions and dtype.
    info['samples'] = spectral_img.ncols
    info['lines'] = spectral_img.nrows
    info['bands'] = spectral_img.nbands
    info['data_type'] = spectral_img.dtype

    # Bytes per sample for each supported numpy dtype (default 4).
    # NOTE(review): spectral_img.dtype may be a dtype string rather than a
    # numpy scalar type, in which case the .get() default of 4 applies —
    # verify against the spectral library version in use.
    dtype_map = {
        np.uint8: 1,
        np.int16: 2,
        np.float32: 4,
        np.float64: 8,
        np.uint16: 2,
        np.uint32: 4,
    }
    info['data_type_bytes'] = dtype_map.get(spectral_img.dtype, 4)

    # Interleave scheme (bip/bil/bsq).
    info['interleave'] = spectral_img.interleave

    # Georeferencing from the ENVI 'map info' metadata entry.
    metadata = spectral_img.metadata
    print(f"元数据键: {list(metadata.keys())}")
    if 'map info' in metadata:
        map_info = metadata['map info']
        print(f"map info 类型: {type(map_info)}")
        print(f"map info 内容: {map_info}")

        # Dict form: already parsed by the spectral library.
        if isinstance(map_info, dict):
            info['projection'] = map_info.get('projection', 'UTM')
            info['x_ul'] = map_info.get('x_ul', map_info.get('ul_x', 0))
            info['y_ul'] = map_info.get('y_ul', map_info.get('ul_y', 0))
            info['pixel_x'] = map_info.get('pixel_x', map_info.get('pixel_size_x', 1))
            info['pixel_y'] = map_info.get('pixel_y', map_info.get('pixel_size_y', 1))
            info['utm_zone'] = str(map_info.get('zone', 51))
            info['hemisphere'] = map_info.get('hemisphere', 'North')
            info['datum'] = map_info.get('datum', 'WGS-84')
            # Compatibility aliases used elsewhere.
            info['x_start'] = info['x_ul']
            info['y_start'] = info['y_ul']
        else:
            # String form: parse manually.
            map_info_str = str(map_info)
            # Remove surrounding braces.
            map_info_str = map_info_str.strip('{}')
            map_parts = map_info_str.split(',')
            if len(map_parts) >= 9:
                info['projection'] = map_parts[0].strip()

                # ENVI map info layout:
                # [proj, ref_col, ref_row, ref_x, ref_y, pixel_x, pixel_y, zone, hemisphere, ...]
                ref_col = float(map_parts[1].strip().strip("'\""))
                ref_row = float(map_parts[2].strip().strip("'\""))
                ref_x = float(map_parts[3].strip().strip("'\""))
                ref_y = float(map_parts[4].strip().strip("'\""))
                pixel_x = float(map_parts[5].strip().strip("'\""))
                pixel_y = float(map_parts[6].strip().strip("'\""))
                info['utm_zone'] = map_parts[7].strip().strip("'\"")
                info['hemisphere'] = map_parts[8].strip().strip("'\"")
                info['datum'] = map_parts[9].strip().strip("'\"") if len(map_parts) > 9 else 'WGS-84'

                # Back out the upper-left corner of the upper-left pixel
                # from the (1-based, pixel-centre) reference pixel.
                x_ul = ref_x - (ref_col - 0.5) * pixel_x
                y_ul = ref_y + (ref_row - 0.5) * pixel_y

                # Persist parsed values.
                info['ref_col'] = ref_col
                info['ref_row'] = ref_row
                info['ref_x'] = ref_x
                info['ref_y'] = ref_y
                info['pixel_x'] = pixel_x
                info['pixel_y'] = pixel_y
                info['x_ul'] = x_ul
                info['y_ul'] = y_ul

        # Build the GDAL-style affine transform:
        # [x_ul, pixel_x, 0, y_ul, 0, -pixel_y]
        # NOTE(review): assumes x_ul/pixel_x/pixel_y were populated above;
        # a malformed 'map info' string (< 9 parts) would raise KeyError
        # here — confirm whether such headers occur in practice.
        info['geo_transform'] = [
            info['x_ul'],      # X origin (upper-left corner)
            info['pixel_x'],   # X pixel size
            0.0,               # X rotation (normally 0)
            info['y_ul'],      # Y origin (upper-left corner)
            0.0,               # Y rotation (normally 0)
            -info['pixel_y']   # Y pixel size (negative: Y grows downward)
        ]

    print(f"图像信息: {info['samples']}x{info['lines']}x{info['bands']}像素")
    print(f"数据类型: {info['data_type']} ({info['data_type_bytes']}字节)")
    if 'utm_zone' in info:
        print(f"地理参考: UTM Zone {info['utm_zone']}{info.get('hemisphere', 'N')}")
        print(f"左上角坐标: ({info['x_ul']:.2f}, {info['y_ul']:.2f})")
        print(f"像素大小: {info['pixel_x']}x{info['pixel_y']}米")

    return info
|
||
|
||
def _read_bip_manual(self, bip_file: Path) -> Tuple[np.ndarray, Dict]:
    """Fallback reader: hand-parse the ENVI header and memory-map the cube.

    Args:
        bip_file: path to the .bip file (its .hdr sidecar is parsed).

    Returns:
        Tuple of (memory-mapped image array, image-info dict).

    Raises:
        ValueError: when the on-disk file size does not match the
            dimensions declared in the header.
    """
    print(f"手动读取BIP图像文件: {bip_file}")

    # Parse the .hdr sidecar for dimensions / dtype / georeferencing.
    header = self._parse_envi_header(bip_file.with_suffix('.hdr'))

    # Sanity check: header-derived size must match the file on disk.
    expected_size = (header['samples'] * header['lines'] *
                     header['bands'] * header['data_type_bytes'])
    actual_size = bip_file.stat().st_size
    if actual_size != expected_size:
        raise ValueError(f"文件大小不匹配。期望: {expected_size} bytes, 实际: {actual_size} bytes")

    print("读取图像数据...")
    started = time.time()

    # ENVI 'data type' codes -> numpy dtypes.
    envi_dtypes = {
        1: np.uint8,     # Byte
        2: np.int16,     # Integer (2 bytes)
        4: np.float32,   # Float (4 bytes)
        5: np.float64,   # Double (8 bytes)
        12: np.uint16,   # Unsigned integer (2 bytes)
        13: np.uint32,   # Unsigned long (4 bytes)
    }
    dtype = envi_dtypes.get(header['data_type'], np.float32)

    # Memory-map instead of loading: BIP is pixel-interleaved, which is
    # plain C order with shape (lines, samples, bands).
    cube = np.memmap(str(bip_file), dtype=dtype, mode='r',
                     shape=(header['lines'], header['samples'], header['bands']),
                     order='C')

    print(f"图像数据读取完成,耗时: {time.time() - started:.2f} 秒")
    return cube, header
|
||
|
||
def _parse_envi_header(self, hdr_file: Path) -> Dict:
    """Parse an ENVI .hdr header file into an image-info dictionary.

    Handles the basic 'key = value' entries (samples/lines/bands/data
    type/byte order/interleave/data ignore value) plus the 'map info'
    entry, from which a GDAL-style affine geo_transform is derived.

    Args:
        hdr_file: path to the ENVI header file.

    Returns:
        Image-info dictionary.
    """
    info = {}

    with open(hdr_file, 'r', encoding='utf-8', errors='ignore') as f:
        content = f.read()

    # Line-by-line 'key = value' parse.
    lines = content.split('\n')
    for line in lines:
        line = line.strip()
        if '=' in line:
            key, value = line.split('=', 1)
            key = key.strip().lower()
            value = value.strip()

            # Strip braces and stray '=' from the value.
            value = value.strip('{}=')

            if key == 'samples':
                info['samples'] = int(value)
            elif key == 'lines':
                info['lines'] = int(value)
            elif key == 'bands':
                info['bands'] = int(value)
            elif key == 'data type':
                info['data_type'] = int(value)
            elif key == 'byte order':
                info['byte_order'] = int(value)
            elif key == 'interleave':
                info['interleave'] = value.lower()
            elif key == 'data ignore value':
                info['ignore_value'] = float(value) if '.' in value else int(value)
            elif key == 'map info':
                # Parse the georeferencing entry.
                # NOTE(review): this assumes 'map info' fits on one header
                # line; a multi-line entry would be truncated — confirm
                # against the headers produced by the acquisition software.
                map_parts = value.split(',')
                if len(map_parts) >= 9:
                    info['projection'] = map_parts[0].strip()

                    # ENVI map info layout:
                    # [proj, ref_col, ref_row, ref_x, ref_y, pixel_x, pixel_y, zone, hemisphere, ...]
                    # ref_col/ref_row: reference pixel (ENVI 1-based, pixel centre)
                    # ref_x/ref_y: map coordinates of that reference pixel
                    ref_col = float(map_parts[1].strip())
                    ref_row = float(map_parts[2].strip())
                    ref_x = float(map_parts[3].strip())
                    ref_y = float(map_parts[4].strip())
                    pixel_x = float(map_parts[5].strip())
                    pixel_y = float(map_parts[6].strip())
                    info['utm_zone'] = map_parts[7].strip()
                    info['hemisphere'] = map_parts[8].strip()
                    info['datum'] = map_parts[9].strip() if len(map_parts) > 9 else 'WGS-84'

                    # Back out the upper-left corner of the (0,0) pixel:
                    # ENVI reference is 1-based and pixel-centred, whereas
                    # the GDAL origin is the upper-left pixel's corner.
                    x_ul = ref_x - (ref_col - 0.5) * pixel_x  # shift west to the UL corner X
                    y_ul = ref_y + (ref_row - 0.5) * pixel_y  # shift north to the UL corner Y

                    # Persist parsed values.
                    info['ref_col'] = ref_col
                    info['ref_row'] = ref_row
                    info['ref_x'] = ref_x
                    info['ref_y'] = ref_y
                    info['pixel_x'] = pixel_x
                    info['pixel_y'] = pixel_y
                    info['x_ul'] = x_ul
                    info['y_ul'] = y_ul

                    # GDAL-style affine transform:
                    # [x_ul, pixel_x, 0, y_ul, 0, -pixel_y]
                    info['geo_transform'] = [
                        x_ul,      # X origin (UL corner of UL pixel)
                        pixel_x,   # X pixel size
                        0.0,       # X rotation (normally 0)
                        y_ul,      # Y origin (UL corner of UL pixel)
                        0.0,       # Y rotation (normally 0)
                        -pixel_y   # Y pixel size (negative: Y grows downward)
                    ]

    # Bytes per sample for each ENVI data-type code (default 4).
    data_type_bytes = {
        1: 1,   # Byte
        2: 2,   # Integer
        4: 4,   # Float
        5: 8,   # Double
        12: 2,  # Unsigned integer
        13: 4,  # Unsigned long
    }
    info['data_type_bytes'] = data_type_bytes.get(info['data_type'], 4)

    print(f"图像信息: {info['samples']}x{info['lines']}x{info['bands']}像素")
    print(f"数据类型: {info['data_type']} ({info['data_type_bytes']}字节)")
    # NOTE(review): the prints below raise KeyError for headers without a
    # 'map info' entry — confirm all expected headers are georeferenced.
    print(f"地理参考: UTM Zone {info['utm_zone']}{info['hemisphere']}")
    print(f"左上角坐标: ({info['x_ul']:.2f}, {info['y_ul']:.2f})")
    print(f"像素大小: {info['pixel_x']}x{info['pixel_y']}米")

    return info
|
||
|
||
def extract_pixel_info(self, image_data: np.ndarray, image_info: Dict) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Extract the per-pixel geometry bands from the BIP cube.

    Band layout: 0-2 RGB, 3 original column index, 4 original row
    (scan-line) index, 5 swath id.

    Args:
        image_data: BIP cube of shape (lines, samples, bands>=6).
        image_info: image-info dict (unused here; kept for interface
            compatibility).

    Returns:
        (col_indices, row_indices, swath_ids) as int32 arrays.
    """
    print("提取像素几何信息...")

    # Pull bands 3/4/5 and cast each to int32.
    geometry = {name: image_data[:, :, band].astype(np.int32)
                for name, band in (('col', 3), ('row', 4), ('swath', 5))}
    col_indices = geometry['col']
    row_indices = geometry['row']
    swath_ids = geometry['swath']

    # Fill pixels are all-zero across every band.
    valid_mask = np.any(image_data != 0, axis=2)

    print(f"总像素数: {col_indices.size}")
    print(f"有效像素数: {np.sum(valid_mask)}")
    print(f"航带数量: {len(np.unique(swath_ids[valid_mask]))}")

    return col_indices, row_indices, swath_ids
|
||
|
||
def load_gps_data(self, gps_file: Path) -> pd.DataFrame:
    """Load the GPS/IMU track from a whitespace-delimited text file.

    Columns: Date Time roll pitch yaw Lon Lat Height. Adds a combined
    'datetime' column (kept timezone-naive — the data is already UTC, and
    naive timestamps match the times-file format) and UTM easting/northing
    columns 'X_utm'/'Y_utm'.

    Args:
        gps_file: path to the GPS data file.

    Returns:
        GPS track as a DataFrame.
    """
    print(f"加载GPS数据: {gps_file}")

    column_names = ['Date', 'Time', 'roll', 'pitch', 'yaw', 'Lon', 'Lat', 'Height']
    track = pd.read_csv(gps_file, sep=r'\s+', header=None, engine='python',
                        names=column_names)

    # Merge the date and time columns into one datetime column.
    track['datetime'] = pd.to_datetime(track['Date'] + ' ' + track['Time'])
    track = track.drop(columns=['Date', 'Time'])

    # Project lon/lat to UTM using the shared transformer.
    easting, northing = self.transformer.transform(track['Lon'].values, track['Lat'].values)
    track['X_utm'] = easting
    track['Y_utm'] = northing

    print(f"GPS数据加载完成,共 {len(track)} 个记录")
    print(f"时间范围: {track['datetime'].min()} 到 {track['datetime'].max()}")
    print(f"位置范围: 经度 {track['Lon'].min():.6f}°-{track['Lon'].max():.6f}°, "
          f"纬度 {track['Lat'].min():.6f}°-{track['Lat'].max():.6f}°")

    return track
|
||
|
||
def load_times_data(self, times_file: Path) -> np.ndarray:
    """Load per-scan-line timestamps (GPS seconds-of-week) from a .times file.

    Args:
        times_file: path to the .times file (one float per line).

    Returns:
        1-D array of seconds-of-week values.
    """
    print(f"加载时间戳数据: {times_file}")

    timestamps = np.loadtxt(times_file)

    print(f"时间戳数据加载完成,共 {len(timestamps)} 个记录")
    print(f"时间范围: {timestamps[0]:.6f} - {timestamps[-1]:.6f} 秒")

    return timestamps
|
||
|
||
def convert_week_seconds_to_datetime(self, week_seconds: float, base_date: datetime) -> datetime:
    """Convert GPS seconds-of-week to an absolute datetime.

    The GPS week starts at Sunday 00:00:00; the result is that week start
    (relative to base_date) plus week_seconds.

    Args:
        week_seconds: seconds elapsed since the start of the GPS week.
        base_date: any datetime within the GPS week of interest.

    Returns:
        Absolute datetime.
    """
    # Days to step back from base_date to the most recent Sunday
    # (weekday(): Monday=0 ... Sunday=6).
    dow = base_date.weekday()
    back_to_sunday = 0 if dow == 6 else dow + 1

    sunday = base_date - timedelta(days=back_to_sunday)
    # Truncate to midnight of that Sunday.
    week_start = datetime(sunday.year, sunday.month, sunday.day)

    return week_start + timedelta(seconds=week_seconds)
|
||
|
||
|
||
|
||
|
||
|
||
def get_pixel_coordinates(self, col_idx: int, row_idx: int, swath_id: int,
                          image_info: Dict) -> Tuple[float, float]:
    """Map a pixel (col, row) to UTM map coordinates.

    Applies the standard GDAL affine transform stored in
    image_info['geo_transform'].

    Args:
        col_idx: pixel column index in the current image.
        row_idx: pixel row index in the current image.
        swath_id: swath id (unused; kept for interface compatibility).
        image_info: image-info dict containing 'geo_transform'.

    Returns:
        (X, Y) UTM coordinates.
    """
    x_origin, x_per_col, x_per_row, y_origin, y_per_col, y_per_row = image_info['geo_transform']

    # GDAL pixel -> geo transform.
    map_x = x_origin + col_idx * x_per_col + row_idx * x_per_row
    map_y = y_origin + col_idx * y_per_col + row_idx * y_per_row

    return map_x, map_y
|
||
|
||
def _precompute_pixel_coordinates(self, image_info: Dict, valid_pixels: np.ndarray) -> np.ndarray:
|
||
"""
|
||
预计算所有有效像素的UTM坐标(向量化)- 参照raster_to_points.py的实现
|
||
|
||
Args:
|
||
image_info: 图像信息,包含geo_transform
|
||
valid_pixels: 有效像素位置数组 (N, 2) - [row, col]
|
||
|
||
Returns:
|
||
UTM坐标数组 (N, 2) - [x, y]
|
||
"""
|
||
if len(valid_pixels) == 0:
|
||
return np.empty((0, 2), dtype=np.float64)
|
||
|
||
geo_transform = image_info['geo_transform']
|
||
rows, cols = valid_pixels[:, 0], valid_pixels[:, 1]
|
||
|
||
# 使用GDAL标准的像素到地理坐标变换公式
|
||
# pixel_x = geo_transform[0] + col * geo_transform[1] + row * geo_transform[2]
|
||
# pixel_y = geo_transform[3] + col * geo_transform[4] + row * geo_transform[5]
|
||
pixel_x = geo_transform[0] + cols * geo_transform[1] + rows * geo_transform[2]
|
||
pixel_y = geo_transform[3] + cols * geo_transform[4] + rows * geo_transform[5]
|
||
|
||
return np.column_stack([pixel_x, pixel_y])
|
||
|
||
def _process_pixels_chunked_optimized(self, valid_pixels: np.ndarray, col_indices: np.ndarray,
                                      row_indices: np.ndarray, swath_ids: np.ndarray,
                                      times_data_cache: Dict, base_date: datetime,
                                      image_info: Dict) -> np.ndarray:
    """Vectorized per-pixel angle computation driven by a master table.

    Builds the master table (pixel coordinates + matched GPS data), runs
    one vectorized angle computation over it, and scatters the results
    back into a (lines, samples, 5) image cube.

    Bug fixes relative to the previous version:
    - the elapsed-time reports were bare `print(".2f")` statements (a
      broken f-string conversion); real messages are restored;
    - the row-by-row iterrows() write-back is replaced by a single numpy
      fancy-indexing assignment (same result, O(N) in C).

    Args:
        valid_pixels: (N, 2) [row, col] positions of valid pixels.
        col_indices / row_indices / swath_ids: per-pixel geometry bands.
        times_data_cache: swath_id -> seconds-of-week array.
        base_date: acquisition base date.
        image_info: image-info dict (dimensions, geo_transform).

    Returns:
        (lines, samples, 5) float32 cube [SZA, SAA, VZA, VAA, RAA];
        NaN marks pixels without results.
    """
    total_start_time = time.time()

    # NaN-initialized output cube.
    angle_results = np.full((image_info['lines'], image_info['samples'], 5), np.nan, dtype=np.float32)
    total_pixels = len(valid_pixels)

    print(f"开始构建主数据表,共 {total_pixels} 个有效像素")
    build_start = time.time()
    master_table = self._build_master_table(valid_pixels, col_indices, row_indices, swath_ids,
                                            times_data_cache, base_date, image_info)
    build_time = time.time() - build_start
    print(f"主数据表构建耗时: {build_time:.2f} 秒")

    if len(master_table) == 0:
        print("警告: 没有有效的像素数据")
        return angle_results

    print(f"主数据表构建完成,包含 {len(master_table)} 条记录")

    # One vectorized pass over the whole table.
    print("开始向量化角度计算...")
    angle_calc_start = time.time()
    if HAS_TQDM:
        print(f"正在计算 {len(master_table)} 个像素的角度...")
    angles = self._calculate_angles_vectorized_from_master_table(master_table, base_date)
    angle_calc_time = time.time() - angle_calc_start
    print(f"角度计算耗时: {angle_calc_time:.2f} 秒")

    # Scatter the per-row results into the image cube in one assignment.
    print("将结果写回图像矩阵...")
    write_back_start = time.time()
    img_rows = master_table['img_row'].to_numpy(dtype=np.int64)
    img_cols = master_table['img_col'].to_numpy(dtype=np.int64)
    angle_results[img_rows, img_cols] = angles
    write_back_time = time.time() - write_back_start
    print(f"结果写回耗时: {write_back_time:.2f} 秒")

    total_time = time.time() - total_start_time
    print(f"总耗时: {total_time:.2f} 秒")
    print("角度计算完成")

    return angle_results
|
||
|
||
def _build_master_table(self, valid_pixels: np.ndarray, col_indices: np.ndarray,
                        row_indices: np.ndarray, swath_ids: np.ndarray,
                        times_data_cache: Dict, base_date: datetime,
                        image_info: Dict) -> pd.DataFrame:
    """Build the master table: one row per valid pixel with its UTM
    coordinates, original col/row, swath id, acquisition time and the
    nearest-in-time GPS record.

    Args:
        valid_pixels: (N, 2) [row, col] positions of valid pixels.
        col_indices: original column-index band.
        row_indices: original row-index band.
        swath_ids: swath-id band.
        times_data_cache: swath_id -> seconds-of-week array.
        base_date: acquisition base date.
        image_info: image-info dict.

    Returns:
        Master-table DataFrame (possibly empty).
    """
    # Vectorized UTM coordinates for all valid pixels at once.
    pixel_coords = self._precompute_pixel_coordinates(image_info, valid_pixels)

    # Assemble one record per usable pixel.
    master_data = []

    for idx, (img_row, img_col) in enumerate(valid_pixels):
        img_row = int(img_row)  # numpy scalar -> Python int
        img_col = int(img_col)  # numpy scalar -> Python int
        # .item() extracts the scalar safely from the band arrays.
        orig_col = int(col_indices[img_row, img_col].item())
        orig_row = int(row_indices[img_row, img_col].item())
        swath_id = int(swath_ids[img_row, img_col].item())

        # Skip pixels whose swath has no times data.
        if swath_id not in times_data_cache:
            continue

        times_data = times_data_cache[swath_id]

        # Skip rows beyond the times array (last row excluded as well).
        if orig_row >= len(times_data) - 1:
            continue

        # Pre-computed UTM coordinates of this pixel.
        pixel_x, pixel_y = pixel_coords[idx]

        # Scan-line acquisition time (seconds-of-week, line start time).
        t_center = times_data[orig_row]

        # Convert to an absolute datetime.
        absolute_time = self.convert_week_seconds_to_datetime(t_center, base_date)

        master_data.append({
            'img_row': img_row,
            'img_col': img_col,
            'pixel_x': pixel_x,
            'pixel_y': pixel_y,
            'col_idx': orig_col,
            'row_idx': orig_row,
            'swath_id': swath_id,
            'timestamp': absolute_time,
            'week_seconds': t_center
        })

    # Materialize as a DataFrame.
    master_df = pd.DataFrame(master_data)

    if len(master_df) == 0:
        return master_df

    # Nearest-neighbour match against the GPS track.
    print("进行GPS数据最近邻匹配...")

    # All pixels of the same scan line share a timestamp, so match GPS
    # once per unique (swath_id, row_idx) pair for efficiency.
    unique_time_rows = master_df[['swath_id', 'row_idx', 'timestamp']].drop_duplicates()

    # Nearest-in-time join (both sides sorted as merge_asof requires).
    gps_matched = pd.merge_asof(
        unique_time_rows.sort_values('timestamp'),
        self.gps_data.sort_values('datetime')[['datetime', 'X_utm', 'Y_utm', 'Height', 'Lon', 'Lat']],
        left_on='timestamp',
        right_on='datetime',
        direction='nearest'
    )

    # Rename the GPS columns to sensor_* names.
    gps_matched = gps_matched.rename(columns={
        'X_utm': 'sensor_x',
        'Y_utm': 'sensor_y',
        'Height': 'sensor_height',
        'Lon': 'sensor_lon',
        'Lat': 'sensor_lat'
    })

    # Sensor height above the reference ground elevation.
    gps_matched['relative_height'] = gps_matched['sensor_height'] - self.base_height

    # Broadcast the per-line GPS match back onto every pixel row.
    master_df = master_df.merge(
        gps_matched[['swath_id', 'row_idx', 'sensor_x', 'sensor_y', 'sensor_height',
                     'relative_height', 'sensor_lon', 'sensor_lat']],
        on=['swath_id', 'row_idx'],
        how='left'
    )

    # Local time: UTC -> Beijing (Asia/Shanghai).
    master_df['local_time'] = master_df['timestamp'].dt.tz_localize('UTC').dt.tz_convert('Asia/Shanghai')

    print(f"GPS匹配完成,共匹配 {len(master_df)} 条记录")

    return master_df
|
||
|
||
def _calculate_angles_vectorized_from_master_table(self, master_chunk: pd.DataFrame,
                                                   base_date: datetime) -> np.ndarray:
    """Compute all five angles for every master-table row, vectorized.

    Fix relative to the previous version: the dead, wrongly-sized
    `np.empty((n_pixels, 3))` pre-allocation (immediately discarded by the
    final column_stack of 5 columns) has been removed.

    Args:
        master_chunk: master-table rows with pixel/sensor coordinates and
            timestamps.
        base_date: acquisition base date (unused here; kept for interface
            compatibility).

    Returns:
        (N, 5) array of [SZA, SAA, VZA, VAA, RAA] in degrees.
    """
    # Solar zenith/azimuth for every pixel, in one batch.
    solar_angles = self._calculate_solar_angles_batch(
        master_chunk['local_time'].values,
        master_chunk['sensor_lon'].values,
        master_chunk['sensor_lat'].values
    )

    # Coordinates needed for the sensor geometry (UTM metres; height is
    # relative to the reference ground elevation).
    sensor_x = master_chunk['sensor_x'].values
    sensor_y = master_chunk['sensor_y'].values
    sensor_z = master_chunk['relative_height'].values
    pixel_x = master_chunk['pixel_x'].values
    pixel_y = master_chunk['pixel_y'].values

    # Sensor (view) zenith angle: 90° minus the elevation of the sensor as
    # seen from the pixel; nadir pixels (zero horizontal distance) are
    # handled by the errstate guard, then everything is clipped to [0, 90].
    horizontal_dist = np.sqrt((sensor_x - pixel_x) ** 2 + (sensor_y - pixel_y) ** 2)
    with np.errstate(divide='ignore', invalid='ignore'):
        vza = 90.0 - np.degrees(np.arctan(sensor_z / horizontal_dist))
    vza = np.clip(vza, 0.0, 90.0)

    # Sensor (view) azimuth angle: bearing from pixel to sensor, clockwise
    # from north — atan2(east, north) — normalized to [0, 360).
    dx = sensor_x - pixel_x  # easting difference
    dy = sensor_y - pixel_y  # northing difference
    vaa = np.degrees(np.arctan2(dx, dy)) % 360.0

    # Relative azimuth: smallest angular separation between SAA and VAA.
    saa = solar_angles[:, 1]
    diff = np.abs(saa - vaa)
    raa = np.minimum(diff, 360 - diff)

    # Column order: [SZA, SAA, VZA, VAA, RAA].
    return np.column_stack([solar_angles[:, 0], saa, vza, vaa, raa])
|
||
|
||
def _calculate_solar_angles_batch(self, datetime_array, lon_array, lat_array) -> np.ndarray:
    """Vectorized solar zenith/azimuth via pvlib's analytical formulas.

    Fixes relative to the previous version:
    - pvlib's `declination_cooper69` already returns RADIANS (see pvlib
      docs); the extra `np.radians()` double-converted the declination and
      skewed both angles. The value is now used directly.
    - the unused `n_points` local was removed.

    Args:
        datetime_array: timestamps (one per pixel); localized to UTC and
            converted to Beijing time before the computation.
        lon_array: longitudes in degrees.
        lat_array: latitudes in degrees.

    Returns:
        (N, 2) array of [SZA, SAA] in degrees; SZA clipped to [0, 90],
        SAA normalized to [0, 360).
    """
    # Interpret the timestamps as UTC and convert to Beijing time.
    datetime_idx = pd.DatetimeIndex(datetime_array).tz_localize('UTC').tz_convert('Asia/Shanghai')

    # Equation of time (minutes) and hour angle (degrees), both batched.
    equation_of_time = solarposition.equation_of_time_spencer71(datetime_idx.dayofyear)
    hour_angle = solarposition.hour_angle(datetime_idx, lon_array, equation_of_time)

    # Solar declination — already in radians (pvlib declination_cooper69).
    declination_rad = solarposition.declination_cooper69(datetime_idx.dayofyear)

    # Analytical zenith/azimuth; all angular inputs in radians.
    lat_rad = np.radians(lat_array)
    hour_angle_rad = np.radians(hour_angle)
    sza_rad = solarposition.solar_zenith_analytical(lat_rad, hour_angle_rad, declination_rad)
    saa_rad = solarposition.solar_azimuth_analytical(lat_rad, hour_angle_rad, declination_rad, sza_rad)

    # Degrees; clip SZA to [0, 90] (sun below the horizon yields > 90).
    sza_deg = np.clip(np.degrees(sza_rad), 0.0, 90.0)
    saa_deg = np.degrees(saa_rad) % 360.0

    return np.column_stack([sza_deg, saa_deg])
|
||
|
||
|
||
def _extract_swath_name_from_bip_filename(self, bip_file: Path) -> str:
|
||
"""
|
||
从输入的 BIP/HDR 文件名中提取“航带名称”(仅包含数字与下划线的前缀)。
|
||
|
||
示例:
|
||
2025_9_2_3_53_45_202592_35252_0_rad_rgbxyz_geo_registered.bip
|
||
-> 2025_9_2_3_53_45_202592_35252_0
|
||
"""
|
||
name = bip_file.name
|
||
base = name
|
||
lower = base.lower()
|
||
if lower.endswith('.bip.hdr'):
|
||
base = base[:-len('.bip.hdr')]
|
||
elif lower.endswith('.hdr'):
|
||
base = base[:-len('.hdr')]
|
||
elif lower.endswith('.bip'):
|
||
base = base[:-len('.bip')]
|
||
|
||
match = re.match(r'^(\d+(?:_\d+)*)', base)
|
||
if match:
|
||
return match.group(1)
|
||
|
||
# 兜底:常见格式会在 _rad... 之前出现航带名
|
||
if '_rad' in base:
|
||
return base.split('_rad', 1)[0]
|
||
return Path(base).stem
|
||
|
||
def _find_times_file_for_bip(self, bip_file: Path, times_root_dir: Optional[Path] = None) -> Optional[Path]:
|
||
"""
|
||
根据输入的 BIP/HDR 文件名解析“航带名称”,并在 times 根目录下查找对应的 times 文件。
|
||
|
||
期望目录结构:
|
||
times_root_dir/
|
||
航带名称/
|
||
航带名称.bil.times
|
||
|
||
Args:
|
||
bip_file: 输入的 BIP/HDR 文件
|
||
times_root_dir: times 根目录(可选,优先于配置)
|
||
|
||
Returns:
|
||
times文件路径,如果找不到则返回None
|
||
"""
|
||
root = Path(times_root_dir) if times_root_dir is not None else self.times_root_dir
|
||
|
||
if not root.exists():
|
||
print(f"警告: times根目录不存在 {root}")
|
||
return None
|
||
|
||
swath_name = self._extract_swath_name_from_bip_filename(bip_file)
|
||
expected = root / swath_name / f"{swath_name}.bil.times"
|
||
if expected.exists():
|
||
return expected
|
||
|
||
# 兼容:旧结构可能直接在根目录下
|
||
flat = root / f"{swath_name}.bil.times"
|
||
if flat.exists():
|
||
return flat
|
||
|
||
# 再兜底:在子目录中搜索同名 times 文件
|
||
try:
|
||
for p in root.glob(f"*/{swath_name}.bil.times"):
|
||
return p
|
||
for p in root.rglob(f"{swath_name}.bil.times"):
|
||
return p
|
||
except Exception as e:
|
||
print(f"警告: 搜索times文件失败: {e}")
|
||
|
||
return None
|
||
|
||
def _parse_base_date_from_times_filename(self, times_filename: str) -> datetime:
|
||
"""
|
||
从times文件名中解析基准日期
|
||
|
||
Args:
|
||
times_filename: times文件名(不含扩展名)
|
||
|
||
Returns:
|
||
基准日期时间
|
||
"""
|
||
# 示例文件名: 2025_9_2_3_11_52_202592_31037_2
|
||
# 格式: YYYY_M_D_H_M_S_...
|
||
|
||
# 去掉扩展名并分割
|
||
parts = times_filename.replace('.bil.times', '').split('_')
|
||
|
||
if len(parts) >= 6:
|
||
try:
|
||
year, month, day, hour, minute, second = map(int, parts[:6])
|
||
return datetime(year, month, day, hour, minute, second)
|
||
except (ValueError, IndexError) as e:
|
||
print(f"警告: 无法从文件名解析日期 {times_filename}: {e}")
|
||
# 返回默认日期
|
||
return datetime(2025, 9, 2, 3, 11, 52)
|
||
|
||
print(f"警告: 文件名格式不正确 {times_filename}")
|
||
return datetime(2025, 9, 2, 3, 11, 52)
|
||
|
||
def process_image(self, bip_file: Path, gps_file: Path, times_root_dir: Optional[Path] = None) -> np.ndarray:
    """Process one BIP image: compute the five angles for every valid pixel.

    For each valid (non all-zero) pixel:
    - its UTM coordinates come from the image's affine geo-transform;
    - bands 4/5/6 (original col/row and swath id) determine the
      acquisition time and matching GPS record;
    - solar zenith/azimuth, sensor zenith/azimuth and the relative
      azimuth are computed.

    Fixes relative to the previous version: the times file was located
    twice (steps 5 and 7) — it is now resolved once; the dead
    `angle_results` pre-allocation (immediately overwritten) and the
    unused `total_pixels` local were removed.

    Args:
        bip_file: BIP image file path.
        gps_file: GPS data file path.
        times_root_dir: optional times root directory (overrides config).

    Returns:
        (lines, samples, 5) cube of [SZA, SAA, VZA, VAA, RAA].

    Raises:
        ValueError: when no swath ids are found or no times file matches.
    """
    print(f"\n开始处理图像: {bip_file.name}")
    start_time = time.time()

    # 1. Read the image and cache it on the instance.
    image_data, image_info = self.read_bip_image(bip_file)
    self.image_info = image_info
    self.image_data = image_data

    # 2. Identify valid pixels up front (fill pixels are all-zero).
    print("预处理:识别有效像素...")
    valid_mask = np.any(image_data != 0, axis=2)
    valid_pixels = np.column_stack(np.where(valid_mask))  # (N, 2) [row, col]
    print(
        f"发现 {len(valid_pixels)} 个有效像素,约占总像素的 {len(valid_pixels) / (image_info['lines'] * image_info['samples']) * 100:.1f}%")

    # 3. Per-pixel geometry bands (original col/row, swath id).
    col_indices, row_indices, swath_ids = self.extract_pixel_info(image_data, image_info)

    # 4. GPS data, loaded once and cached.
    if self.gps_data is None:
        self.gps_data = self.load_gps_data(gps_file)

    # 5. Swath ids present in the image.
    unique_swath_ids = np.unique(swath_ids[swath_ids > 0])
    print(f"发现航带: {unique_swath_ids}")
    if len(unique_swath_ids) == 0:
        raise ValueError("未识别到有效的航带ID,无法关联times文件")

    # 6. Resolve the times file ONCE and parse the base date from its name.
    times_file = self._find_times_file_for_bip(bip_file, times_root_dir=times_root_dir)
    if not times_file:
        raise ValueError("无法找到该BIP对应的times文件")
    base_date = self._parse_base_date_from_times_filename(times_file.stem)
    print(f"从times文件解析的基准日期: {base_date}")

    # 7. Map every swath id to the (single, shared) times data.
    # The times file is uniquely determined by the input BIP/HDR swath
    # name; when several swath ids appear they all share the same data.
    if str(times_file) not in self.times_data:
        self.times_data[str(times_file)] = self.load_times_data(times_file)
    shared_times_data = self.times_data[str(times_file)]
    times_data_cache = {swath_id: shared_times_data for swath_id in unique_swath_ids}

    # 8. Vectorized per-pixel angle computation.
    print(f"开始计算每个像素的角度... 处理 {len(valid_pixels)} 个有效像素")
    print("使用优化的向量化处理模式(纯NumPy实现)")
    angle_results = self._process_pixels_chunked_optimized(
        valid_pixels, col_indices, row_indices, swath_ids,
        times_data_cache, base_date, image_info
    )

    total_time = time.time() - start_time
    processed_pixels = np.sum(~np.isnan(angle_results[:, :, 0]))
    print(f"\n图像处理完成,共处理 {processed_pixels} 个有效像素,总耗时: {total_time:.2f} 秒")
    print(f"平均处理速度: {processed_pixels / total_time:.0f} 像素/秒")
    return angle_results
|
||
|
||
def save_bil_results(self, angle_results: np.ndarray, output_file: Path, image_info: Dict, original_bip_file: Path = None):
|
||
"""
|
||
将角度结果保存为ENVI BSQ格式(使用GDAL),HDR文件基于原始图像
|
||
|
||
Args:
|
||
angle_results: 角度结果数组 (lines, samples, 3)
|
||
output_file: 输出文件路径
|
||
image_info: 原始图像信息
|
||
original_bip_file: 原始BIP文件路径,用于复制HDR文件
|
||
"""
|
||
print(f"使用GDAL保存结果为ENVI BSQ格式: {output_file}")
|
||
|
||
from osgeo import gdal, osr
|
||
|
||
# 获取图像尺寸
|
||
lines, samples, bands = angle_results.shape
|
||
assert bands == 5, "角度结果必须有5个波段"
|
||
|
||
# 创建GDAL驱动
|
||
driver = gdal.GetDriverByName('ENVI')
|
||
|
||
# 创建输出数据集
|
||
dataset = driver.Create(str(output_file), samples, lines, bands, gdal.GDT_Float32,
|
||
options=['INTERLEAVE=BSQ'])
|
||
|
||
if dataset is None:
|
||
raise RuntimeError(f"无法创建输出文件: {output_file}")
|
||
|
||
# 设置地理变换参数(如果有的话)
|
||
if 'geo_transform' in image_info:
|
||
dataset.SetGeoTransform(image_info['geo_transform'])
|
||
|
||
# 设置投影信息
|
||
if 'projection' in image_info:
|
||
srs = osr.SpatialReference()
|
||
# 如果是WKT格式
|
||
if isinstance(image_info['projection'], str) and 'PROJCS' in image_info['projection']:
|
||
srs.ImportFromWkt(image_info['projection'])
|
||
else:
|
||
# 尝试设置为UTM
|
||
utm_zone = image_info.get('utm_zone', 51)
|
||
if isinstance(utm_zone, str):
|
||
utm_zone = int(''.join(filter(str.isdigit, utm_zone)))
|
||
hemisphere = image_info.get('hemisphere', 'North')
|
||
|
||
if hemisphere.upper().startswith('N'):
|
||
epsg_code = 32600 + utm_zone
|
||
else:
|
||
epsg_code = 32700 + utm_zone
|
||
|
||
srs.ImportFromEPSG(epsg_code)
|
||
|
||
dataset.SetProjection(srs.ExportToWkt())
|
||
|
||
# 设置波段数据和元数据
|
||
band_names = ['Solar Zenith Angle (SZA)', 'Solar Azimuth Angle (SAA)', 'Sensor Zenith Angle (VZA)', 'Sensor Azimuth Angle (VAA)', 'Relative Azimuth Angle (RAA)']
|
||
|
||
for band_idx in range(bands):
|
||
band = dataset.GetRasterBand(band_idx + 1)
|
||
band_data = angle_results[:, :, band_idx]
|
||
|
||
# 处理NaN值
|
||
band_data = np.nan_to_num(band_data, nan=0)
|
||
|
||
# 写入数据
|
||
band.WriteArray(band_data)
|
||
band.SetNoDataValue(0)
|
||
|
||
# 设置波段描述
|
||
band.SetDescription(band_names[band_idx])
|
||
|
||
# 设置数据集级别的元数据
|
||
dataset.SetMetadataItem('description', 'Angle calculation results for UAV hyperspectral imagery')
|
||
dataset.SetMetadataItem('band_names', '{' + ', '.join(band_names) + '}')
|
||
|
||
# 清理资源
|
||
dataset.FlushCache()
|
||
dataset = None
|
||
|
||
# GDAL会自动生成HDR文件,现在我们用基于原始HDR的自定义HDR文件覆盖它
|
||
self._create_hdr_from_original(output_file, original_bip_file, bands, band_names)
|
||
|
||
print("结果保存完成")
|
||
|
||
def save_bip_with_angles(self, angle_results: np.ndarray, output_file: Path,
|
||
image_info: Dict, original_bip_file: Path):
|
||
"""
|
||
将角度结果追加到原始BIP数据后面,保存为新的BIP文件
|
||
"""
|
||
print(f"保存带角度的BIP文件: {output_file}")
|
||
|
||
from osgeo import gdal, osr
|
||
|
||
if self.image_data is None or self.image_info is None:
|
||
self.image_data, self.image_info = self.read_bip_image(original_bip_file)
|
||
|
||
lines, samples, orig_bands = self.image_data.shape
|
||
angle_bands = angle_results.shape[2]
|
||
total_bands = orig_bands + angle_bands
|
||
|
||
driver = gdal.GetDriverByName('ENVI')
|
||
dataset = driver.Create(str(output_file), samples, lines, total_bands, gdal.GDT_Float32,
|
||
options=['INTERLEAVE=BIP'])
|
||
if dataset is None:
|
||
raise RuntimeError(f"无法创建输出文件: {output_file}")
|
||
|
||
if 'geo_transform' in image_info:
|
||
dataset.SetGeoTransform(image_info['geo_transform'])
|
||
|
||
if 'projection' in image_info:
|
||
srs = osr.SpatialReference()
|
||
if isinstance(image_info['projection'], str) and 'PROJCS' in image_info['projection']:
|
||
srs.ImportFromWkt(image_info['projection'])
|
||
else:
|
||
utm_zone = image_info.get('utm_zone', 51)
|
||
if isinstance(utm_zone, str):
|
||
utm_zone = int(''.join(filter(str.isdigit, utm_zone)))
|
||
hemisphere = image_info.get('hemisphere', 'North')
|
||
epsg_code = 32600 + utm_zone if hemisphere.upper().startswith('N') else 32700 + utm_zone
|
||
srs.ImportFromEPSG(epsg_code)
|
||
dataset.SetProjection(srs.ExportToWkt())
|
||
|
||
# 写入原始波段
|
||
original_data = self.image_data.astype(np.float32, copy=False)
|
||
for band_idx in range(orig_bands):
|
||
band = dataset.GetRasterBand(band_idx + 1)
|
||
band.WriteArray(original_data[:, :, band_idx])
|
||
|
||
# 写入角度波段
|
||
angle_band_names = [
|
||
'Solar Zenith Angle (SZA)',
|
||
'Solar Azimuth Angle (SAA)',
|
||
'Sensor Zenith Angle (VZA)',
|
||
'Sensor Azimuth Angle (VAA)',
|
||
'Relative Azimuth Angle (RAA)'
|
||
]
|
||
for idx in range(angle_bands):
|
||
band = dataset.GetRasterBand(orig_bands + idx + 1)
|
||
band_data = np.nan_to_num(angle_results[:, :, idx], nan=0)
|
||
band.WriteArray(band_data)
|
||
band.SetNoDataValue(0)
|
||
if idx < len(angle_band_names):
|
||
band.SetDescription(angle_band_names[idx])
|
||
|
||
dataset.FlushCache()
|
||
dataset = None
|
||
|
||
# 生成HDR,保留原始band names并追加角度band names
|
||
if original_bip_file is not None:
|
||
hdr_file = original_bip_file if original_bip_file.suffix.lower() == '.hdr' else original_bip_file.with_suffix('.hdr')
|
||
orig_names = self._extract_band_names_from_hdr(hdr_file, orig_bands)
|
||
else:
|
||
orig_names = [f'Band {i + 1}' for i in range(orig_bands)]
|
||
|
||
appended_names = orig_names + angle_band_names[:angle_bands]
|
||
self._create_hdr_from_original(output_file, original_bip_file, total_bands, appended_names)
|
||
|
||
def _create_hdr_from_original(self, output_file: Path, original_bip_file: Path, bands: int, band_names: list):
|
||
"""
|
||
基于原始HDR文件创建新的HDR文件
|
||
|
||
Args:
|
||
output_file: 输出文件路径
|
||
original_bip_file: 原始BIP文件路径
|
||
bands: 输出文件的波段数
|
||
band_names: 波段名称列表
|
||
"""
|
||
# 查找原始HDR文件
|
||
if original_bip_file is None:
|
||
print(f"警告: 未提供原始文件路径,将创建基本的HDR文件")
|
||
return
|
||
|
||
# 如果输入已经是hdr文件
|
||
if original_bip_file.suffix.lower() == '.hdr':
|
||
original_hdr_file = original_bip_file
|
||
else:
|
||
# 尝试不同的HDR文件命名方式
|
||
original_hdr_file = original_bip_file.with_suffix('.hdr')
|
||
if not original_hdr_file.exists():
|
||
original_hdr_file = original_bip_file.with_suffix('.bip.hdr')
|
||
if not original_hdr_file.exists():
|
||
original_hdr_file = Path(str(original_bip_file) + '.hdr')
|
||
|
||
if not original_hdr_file.exists():
|
||
print(f"警告: 找不到原始HDR文件 {original_hdr_file},将创建基本的HDR文件")
|
||
self._create_basic_hdr_file(output_file, bands, band_names)
|
||
return
|
||
|
||
output_hdr_file = output_file.with_suffix('.hdr')
|
||
|
||
print(f"基于原始HDR文件 {original_hdr_file} 创建新的HDR文件")
|
||
|
||
try:
|
||
with open(original_hdr_file, 'r', encoding='utf-8', errors='ignore') as f:
|
||
hdr_content = f.read()
|
||
|
||
# 分割为行
|
||
lines = hdr_content.split('\n')
|
||
|
||
# 修改相关参数
|
||
modified_lines = []
|
||
skip_wavelength = False
|
||
|
||
for line in lines:
|
||
line_lower = line.lower().strip()
|
||
|
||
# 修改波段数
|
||
if line_lower.startswith('bands'):
|
||
modified_lines.append(f'bands = {bands}')
|
||
continue
|
||
|
||
# 修改数据类型为Float32 (4)
|
||
if line_lower.startswith('data type'):
|
||
modified_lines.append('data type = 4')
|
||
continue
|
||
|
||
# 添加或修改data ignore value
|
||
if line_lower.startswith('data ignore value'):
|
||
modified_lines.append('data ignore value = 0')
|
||
continue
|
||
|
||
# 修改交织方式为BSQ
|
||
if line_lower.startswith('interleave'):
|
||
modified_lines.append('interleave = bip')
|
||
continue
|
||
|
||
# 跳过wavelength相关行
|
||
if line_lower.startswith('wavelength') or line_lower.startswith('wavelength units'):
|
||
skip_wavelength = True
|
||
continue
|
||
elif skip_wavelength and (line.strip() == '' or line.strip().startswith('}')):
|
||
skip_wavelength = False
|
||
continue
|
||
elif skip_wavelength:
|
||
continue
|
||
|
||
# 跳过band names(后面会重新添加)
|
||
if line_lower.startswith('band names'):
|
||
continue
|
||
|
||
modified_lines.append(line)
|
||
|
||
# 添加新的band names
|
||
modified_lines.insert(-1, f'band names = {{')
|
||
for i, name in enumerate(band_names):
|
||
if i < len(band_names) - 1:
|
||
modified_lines.insert(-1, f' {name},')
|
||
else:
|
||
modified_lines.insert(-1, f' {name}')
|
||
modified_lines.insert(-1, '}')
|
||
|
||
# 更新description
|
||
for i, line in enumerate(modified_lines):
|
||
if line.lower().strip().startswith('description'):
|
||
modified_lines[i] = 'description = {Angle calculation results for UAV hyperspectral imagery}'
|
||
break
|
||
|
||
# 写回HDR文件
|
||
with open(output_hdr_file, 'w', encoding='utf-8') as f:
|
||
f.write('\n'.join(modified_lines))
|
||
|
||
print(f"HDR文件创建完成: {output_hdr_file}")
|
||
|
||
except Exception as e:
|
||
print(f"复制HDR文件失败: {e},将创建基本的HDR文件")
|
||
self._create_basic_hdr_file(output_file, bands, band_names)
|
||
|
||
    def _create_basic_hdr_file(self, output_file: Path, bands: int, band_names: list):
        """
        Create a minimal ENVI HDR file from scratch.

        Fallback used when the original HDR cannot be found or adapted.
        Relies on `self.image_info` for the image dimensions and, when
        present, the geo-referencing information.

        Args:
            output_file: output data file path (HDR written alongside it).
            bands: number of bands.
            band_names: band name list.
        """
        output_hdr_file = output_file.with_suffix('.hdr')

        # Minimal ENVI header: Float32 (data type 4), BIP interleave.
        basic_hdr = f"""ENVI
description = {{
Angle calculation results for UAV hyperspectral imagery
}}
samples = {self.image_info['samples']}
lines = {self.image_info['lines']}
bands = {bands}
header offset = 0
file type = ENVI Standard
data type = 4
interleave = bip
byte order = 0
band names = {{
{chr(10).join(f' {name}' for name in band_names)}
}}
"""

        # Append map info when geo-referencing is known.
        # NOTE(review): 'projection' may hold a full WKT string here, which
        # would produce an invalid map-info field — confirm it is a short
        # name (e.g. "UTM") by the time this fallback runs.
        if 'geo_transform' in self.image_info:
            geo_transform = self.image_info['geo_transform']
            map_info_parts = [
                self.image_info.get('projection', 'UTM'),
                '1.00000',  # dummy x start (tie-point pixel x)
                '1.00000',  # dummy y start (tie-point pixel y)
                str(geo_transform[0]),  # x_ul: upper-left easting
                str(geo_transform[3]),  # y_ul: upper-left northing
                str(geo_transform[1]),  # pixel_x: pixel size in x
                str(abs(geo_transform[5])),  # pixel_y: pixel size in y (GDAL stores it negative)
                str(self.image_info.get('utm_zone', 51)),
                self.image_info.get('hemisphere', 'North'),
                'WGS-84',
                'units=m',
                'rotation=0.000'
            ]
            basic_hdr += f"map info = {{{', '.join(map_info_parts)}}}\n"

        with open(output_hdr_file, 'w', encoding='utf-8') as f:
            f.write(basic_hdr)

        print(f"基本HDR文件创建完成: {output_hdr_file}")
|
||
|
||
|
||
def main():
    """Entry point: batch-process every HDR file under the data directory."""
    # Runtime configuration (paths are site-specific).
    config = {
        'data_dir': r'D:\BaiduNetdiskDownload\20250902\_3_52_52\Geoout\Geoout',
        # times root dir: one sub-folder per swath, each containing <swath>.bil.times
        'times_root_dir': r"D:\BaiduNetdiskDownload\20250902\_3_52_52\202592_35252",
        # Single GPS file shared by every HDR file.
        'gps_file': r"D:\BaiduNetdiskDownload\20250902\_3_52_52\2025_9_2_3_53_45.gps",
        'output_dir': r'D:\BaiduNetdiskDownload\20250902\_3_52_52\316\agnle',
        'base_height': 254,  # reference ground elevation
    }

    calculator = UAVAngleCalculator(config)

    data_dir = Path(config['data_dir'])
    times_root_dir = Path(config['times_root_dir'])
    gps_file = Path(config['gps_file'])

    if not gps_file.exists():
        raise FileNotFoundError(f"GPS文件不存在: {gps_file}")

    # Batch mode: every .hdr file found in data_dir is processed.
    hdr_files = sorted(data_dir.glob("*.hdr"))
    if not hdr_files:
        raise FileNotFoundError(f"在目录中未找到任何 .hdr 文件: {data_dir}")

    # Wrap with a progress bar when tqdm is available.
    if HAS_TQDM:
        iterator = tqdm(hdr_files, desc="处理HDR文件", unit="文件")
    else:
        iterator = hdr_files
        print(f"开始处理 {len(hdr_files)} 个HDR文件...")

    for hdr_file in iterator:
        try:
            if HAS_TQDM:
                iterator.set_description(f"处理: {hdr_file.name}")
            else:
                print(f"\n处理文件: {hdr_file.name}")

            angle_results = calculator.process_image(hdr_file, gps_file, times_root_dir=times_root_dir)

            # Derive the output stem, stripping '.bip.hdr' or '.hdr'.
            stem = hdr_file.name
            lowered = stem.lower()
            if lowered.endswith('.bip.hdr'):
                stem = stem[:-len('.bip.hdr')]
            elif lowered.endswith('.hdr'):
                stem = stem[:-len('.hdr')]

            output_file = Path(config['output_dir']) / f"{stem}_angles.bip"
            calculator.save_bip_with_angles(angle_results, output_file, calculator.image_info, hdr_file)

            if not HAS_TQDM:
                print(f"完成: {hdr_file.name} -> {output_file.name}")
        except Exception as e:
            # One bad strip must not abort the whole batch: report and move on.
            if HAS_TQDM:
                iterator.set_description(f"失败: {hdr_file.name}")
            print(f"处理失败: {hdr_file.name}: {e}")
            import traceback
            traceback.print_exc()
            continue

    print("\n处理完成!")
    print(f"输出目录: {config['output_dir']}")
|
||
|
||
|
||
# Script entry point: run the batch processing when executed directly.
if __name__ == "__main__":
    main()