sync

This commit is contained in:
parent 05a2ea6c16
commit f05fcd29b5
@@ -56,6 +56,7 @@ async def index(path):
    if app.static_folder:
        # check if the file exists
        if os.path.exists(os.path.join(app.static_folder, path)):
            #
            return await app.send_static_file(path)
    return await app.send_static_file("index.html")
build.sh (new file, 3 lines)
@@ -0,0 +1,3 @@
#!/bin/bash

pyinstaller --add-data "./res/*:res" --add-data "./res/assets/*:res/assets" ./backend.py
@@ -76,7 +76,7 @@ async def render_single_path():
    response_data = {
        "image": img_str,
        "metadata": {
            "是否是地形波": "是" if result.is_terrain_wave else "否",
            "是否是地形重力波": "是" if result.is_terrain_wave else "否",
        }
    }
@@ -80,9 +80,12 @@ def get_ballon_files():
def get_ballon_path_by_year(start_year, end_year, station=None):
    all_ballon_files = get_ballon_files()
    return list(filter(
        lambda x: any(f"{station}-{year}" in x for year in range(
        lambda x: any(f"-{year}" in x for year in range(
            start_year, end_year + 1)),
        all_ballon_files
        list(filter(
            lambda x: f"{station}" in x,
            all_ballon_files))
    ))
@@ -27,7 +27,7 @@ async def render():
    year = request.args.get("year", 2008)
    T = request.args.get("T", 16)
    k = request.args.get("k", 1)
    target_h = request.args.get("target_h", 40)
    target_h = request.args.get("target_h", 100)
    target_latitude = request.args.get("target_lat", 30)

    # path: str = f"{DATA_BASEPATH.cosmic}/cosmic.txt"
@@ -89,6 +89,10 @@ async def single_render():
        await run_sync(p.plot_results_mean_ktemp_Nz)()
    elif mode == "mean_ktemp_Ptz":
        await run_sync(p.plot_results_mean_ktemp_Ptz)()
    elif mode == 'residual_mean':
        await run_sync(p.plot_results_residuals_means)()
    elif mode == 'background_temp':
        await run_sync(p.plot_results_background_temp)()
    else:
        raise ValueError("Invalid mode")
@@ -103,7 +107,7 @@ async def single_render():
async def multiday_render():
    year = request.args.get("year", 2008)
    start_day = request.args.get("start_day", 1)
    end_day = request.args.get("end_day", 204)
    end_day = request.args.get("end_day", 365)
    mode = request.args.get("mode", "位温分布")
    lat_range = request.args.get("lat_range", "30.0 ~ 40.0")
    lat_range = tuple(map(float, lat_range.split("~")))
@@ -11,7 +11,7 @@ import matplotlib.font_manager as fm
from CONSTANT import DATA_BASEPATH
from modules.cosmic.gravityw_multiday_process import process_single_file

DAY_RANGE = (0, 204)
DAY_RANGE = (0, 365)

# 设置支持中文的字体
plt.rcParams['axes.unicode_minus'] = False  # 正常显示负号
@@ -65,7 +65,7 @@ def plot_heatmap(data, heights, title):
    plt.figure(figsize=(10, 8))
    # 绘制热力图,数据中的行代表高度,列代表天数
    sns.heatmap(data, cmap='coolwarm', xticklabels=1,
                yticklabels=50, cbar_kws={'label': 'Value'})
                yticklabels=50, cbar_kws={'label': '强度'})
    plt.xlabel('天')
    plt.ylabel('高度 (km)')
    plt.title(title)
@@ -97,13 +97,13 @@ class GravityMultidayPlot:
        # 调用函数绘制热力图
        data = self.data
        plot_heatmap(data, self.heights,
                     '"最终平均温度场的热图 (单位:10^(-4)),随浮力频率Nz变化')
                     '年重力势能逐日随高度变化热力图(单位:10^(-4))')

    def plot_heatmap_tempPtz(self):
        # -------------绘制重力势能年统计图------------------------------------------------
        data = self.df_final_mean_ktemp_Ptz.T
        plot_heatmap(data, self.heights,
                     '最终平均动能温度-位势温度的热图 (单位:焦耳/千克)')
                     '年浮力频率的平方逐日随高度变化热力图(单位:焦耳/千克)')

    def plot_monthly_tempNz(self):
        # ------------------------绘制月统计图---------------------------------------------------------------------------------
@@ -139,9 +139,9 @@ class GravityMultidayPlot:
        plt.plot(averaged_by_rows_df.columns, averaged_by_rows_df.mean(
            axis=0), marker='o', color='b', label='平均值')
        # 添加标题和标签
        plt.title('每月平均N^2的折线图')
        plt.title('每月浮力频率的折线图')
        plt.xlabel('月份')
        plt.ylabel('N^2(10^-4)')
        plt.ylabel('浮力频率(10^-4)')
        plt.legend()
        # 显示图形
        plt.grid(True)
@@ -231,7 +231,13 @@ def wave_fitting_and_filtering(merged_df):
        # 计算同一高度下的背景温度(wn0 + wn1 + wn2 + wn3 + wn4 + wn5)
        wn_sum = np.sum([wn0] + wn_curves, axis=0)
        background_temperatures[altitude] = wn_sum
    return fit_results, residuals, background_temperatures
    # 计算均值
    residuals_means = np.array(
        [np.mean(residuals[altitude]) for altitude in residuals])
    background_temperatures_means = np.array(
        [np.mean(background_temperatures[altitude]) for altitude in background_temperatures])

    return fit_results, residuals, background_temperatures, residuals_means, background_temperatures_means


# 模块5: 带通滤波处理
@@ -361,6 +367,7 @@ def plot_results_mean_ktemp_Nz(mean_ktemp_Nz, heights):
    plt.plot(mean_ktemp_Nz, heights / 1000)  # 高度单位换算为km,方便展示
    plt.xlabel('平均浮力频率 (N_z)')
    plt.ylabel('高度(km)')
    plt.title('平均浮力频率随高度变化')
    # plt.gca().invert_yaxis()  # 使高度坐标轴从上到下递增,符合常规习惯
    # plt.show()
@@ -376,6 +383,31 @@ def plot_results_mean_ktemp_Ptz(mean_ktemp_Ptz, heights):
    plt.plot(mean_ktemp_Ptz, heights / 1000)  # 高度单位换算为km,方便展示
    plt.xlabel('平均势能 (PT_z)')
    plt.ylabel('高度(km)')
    plt.title('平均势能随高度变化')
    # plt.gca().invert_yaxis()  # 使高度坐标轴从上到下递增,符合常规习惯
    # plt.show()


# 模块8 绘制残差温度值曲线图和背景温度曲线图
def plot_results1(residuals_means, heights):

    plt.close()

    plt.figure(figsize=(10, 6))
    plt.plot(residuals_means, heights / 1000)  # 高度单位换算为km,方便展示
    plt.title("残差温度值曲线图")
    plt.xlabel('残差(开尔文)')
    plt.ylabel('高度(千米)')
    # plt.gca().invert_yaxis()  # 使高度坐标轴从上到下递增,符合常规习惯
    # plt.show()


def plot_results_background_temp(background_temperatures_means, heights):
    plt.figure(figsize=(10, 6))
    plt.plot(background_temperatures_means, heights / 1000)  # 高度单位换算为km,方便展示
    plt.title("背景温度曲线图")
    plt.xlabel('背景温度(开尔文)')
    plt.ylabel('高度(千米)')
    # plt.gca().invert_yaxis()  # 使高度坐标轴从上到下递增,符合常规习惯
    # plt.show()
@@ -396,7 +428,7 @@ class CosmicGravitywPlot:
        merged_df = calculate_wn0_and_perturbation(
            latitude_filtered_df, altitude_temperature_mean)
        # 模块4调用
        fit_results, residuals, background_temperatures = wave_fitting_and_filtering(
        fit_results, residuals, background_temperatures, residuals_means, background_temperatures_means = wave_fitting_and_filtering(
            merged_df)
        # 模块5调用
        result = bandpass_filtering(residuals)
@@ -410,6 +442,9 @@ class CosmicGravitywPlot:
        self.mean_ktemp_Nz = mean_ktemp_Nz
        self.mean_ktemp_Ptz = mean_ktemp_Ptz
        self.heights = heights

        self.residuals_means = residuals_means
        self.background_temperatures_means = background_temperatures_means
        # 调用绘图模块函数进行绘图

    def plot_results_mean_ktemp_Nz(self):
@@ -417,3 +452,10 @@ class CosmicGravitywPlot:

    def plot_results_mean_ktemp_Ptz(self):
        plot_results_mean_ktemp_Ptz(self.mean_ktemp_Ptz, self.heights)

    def plot_results_residuals_means(self):
        plot_results1(self.residuals_means, self.heights)

    def plot_results_background_temp(self):
        plot_results_background_temp(
            self.background_temperatures_means, self.heights)
@@ -1,4 +1,5 @@
# 高度和target_latitude可以自己选择,纬度可以选+-30,+-60,高度可以每10km
import logging
import netCDF4 as nc
import pandas as pd
import numpy as np
@@ -9,6 +10,87 @@ import matplotlib.pyplot as plt
from CONSTANT import DATA_BASEPATH


def _process_per_folder(base_folder_path, i, target_h):
    # 根据i的值调整文件夹名称
    if i < 10:
        folder_name = f"atmPrf_repro2021_2008_00{i}"  # 一位数,前面加两个0
    elif i < 100:
        folder_name = f"atmPrf_repro2021_2008_0{i}"  # 两位数,前面加一个0
    else:
        folder_name = f"atmPrf_repro2021_2008_{i}"  # 三位数,不加0

    # 构建当前文件夹的路径
    folder_path = os.path.join(base_folder_path, folder_name)

    # 检查文件夹是否存在
    if not os.path.exists(folder_path):
        print(f"文件夹 {folder_path} 不存在。")
        return

    # 遍历文件夹中的文件
    result_folder_level = []
    folder_level_cache_path = f"{folder_path}/planet_{target_h}.parquet"
    if os.path.exists(folder_level_cache_path):
        cache_content = pd.read_parquet(folder_level_cache_path)
        # logging.info(f"读取缓存文件 {folder_level_cache_path}")
        # dfs.append(cache_content)
        return [cache_content]

    logging.info(f"处理文件夹 {folder_path}")
    for file_name in os.listdir(folder_path):
        if not file_name.endswith('.0390_nc'):
            continue

        finfo = os.path.join(folder_path, file_name)
        try:
            dataset = nc.Dataset(finfo, 'r')
            # 提取变量数据
            temp = dataset.variables['Temp'][:]
            altitude = dataset.variables['MSL_alt'][:]
            lat = dataset.variables['Lat'][:]
            lon = dataset.variables['Lon'][:]
            # 读取month, hour, minute
            month = dataset.month
            hour = dataset.hour
            minute = dataset.minute
            # 将i的值作为day的值
            day = i
            # 将day, hour, minute拼接成一个字符串
            datetime_str = f"{day:03d}{hour:02d}{minute:02d}"
            # 确保所有变量都是一维数组
            temp = np.squeeze(temp)
            altitude = np.squeeze(altitude)
            lat = np.squeeze(lat)
            lon = np.squeeze(lon)
            # 检查所有数组的长度是否相同
            assert len(temp) == len(altitude) == len(lat) == len(
                lon), "Arrays must be the same length"
            # 创建DataFrame,并将datetime_str作为第一列添加
            df = pd.DataFrame({
                'Datetime_str': datetime_str,
                'Longitude': lon,
                'Latitude': lat,
                'Altitude': altitude,
                'Temperature': temp
            })
            dataset.close()
            # 仅筛选高度

            df_filtered = df[(df['Altitude'] >= target_h - 0.5)
                             & (df['Altitude'] <= target_h + 0.5)]
            result_folder_level.append(df_filtered)
        except Exception as e:
            print(f"处理文件 {finfo} 时出错: {e}")
    # save to parquet
    # merge them to a big dataframe, save to parquet
    result_folder_level = pd.concat(
        result_folder_level, axis=0, ignore_index=True)
    logging.info(f"保存缓存文件 {folder_level_cache_path}")
    logging.info(f"result_folder_length: {result_folder_level.__len__()}")
    result_folder_level.to_parquet(folder_level_cache_path)
    return [result_folder_level]


def cosmic_planet_daily_process(
        year=2008,
        target_latitude=30,
@@ -37,73 +119,21 @@ def cosmic_planet_daily_process(
        folder_path = os.path.join(base_folder_path, folder_name)

        # 检查文件夹是否存在
        if os.path.exists(folder_path):
            # 遍历文件夹中的文件
            folder_level_cache_path = f"{folder_path}/planet_{target_h}.parquet"
            if os.path.exists(folder_level_cache_path):
                result_folder_level = pd.read_parquet(folder_level_cache_path)
                dfs.append(result_folder_level)
                continue
            result_folder_level = []
            for file_name in os.listdir(folder_path):

                if not file_name.endswith('.0390_nc'):
                    continue

                finfo = os.path.join(folder_path, file_name)
                print(f"正在处理文件: {finfo}")
                try:
                    dataset = nc.Dataset(finfo, 'r')
                    # 提取变量数据
                    temp = dataset.variables['Temp'][:]
                    altitude = dataset.variables['MSL_alt'][:]
                    lat = dataset.variables['Lat'][:]
                    lon = dataset.variables['Lon'][:]
                    # 读取month, hour, minute
                    month = dataset.month
                    hour = dataset.hour
                    minute = dataset.minute
                    # 将i的值作为day的值
                    day = i
                    # 将day, hour, minute拼接成一个字符串
                    datetime_str = f"{day:03d}{hour:02d}{minute:02d}"
                    # 确保所有变量都是一维数组
                    temp = np.squeeze(temp)
                    altitude = np.squeeze(altitude)
                    lat = np.squeeze(lat)
                    lon = np.squeeze(lon)
                    # 检查所有数组的长度是否相同
                    assert len(temp) == len(altitude) == len(lat) == len(
                        lon), "Arrays must be the same length"
                    # 创建DataFrame,并将datetime_str作为第一列添加
                    df = pd.DataFrame({
                        'Datetime_str': datetime_str,
                        'Longitude': lon,
                        'Latitude': lat,
                        'Altitude': altitude,
                        'Temperature': temp
                    })
                    dataset.close()
                    # 仅筛选高度

                    df_filtered = df[(df['Altitude'] >= target_h - 0.5)
                                     & (df['Altitude'] <= target_h + 0.5)]
                    dfs.append(df_filtered)
                    result_folder_level.append(df_filtered)
                except Exception as e:
                    print(f"处理文件 {finfo} 时出错: {e}")
            # save to parquet
            result_folder_level = pd.concat(
                result_folder_level, axis=0, ignore_index=True)
            result_folder_level.to_parquet(folder_level_cache_path)
        else:
        if not os.path.exists(folder_path):
            print(f"文件夹 {folder_path} 不存在。")
            continue

        # 遍历文件夹中的文件
        result_folder_level = _process_per_folder(
            base_folder_path, i, target_h)

        if result_folder_level is not None:
            dfs.extend(result_folder_level)

    # 按行拼接所有DataFrame
    final_df = pd.concat(dfs, axis=0, ignore_index=True)
    # 假设 final_df 是你的大 DataFrame
    final_df.to_csv("output365.txt", index=False, sep=",")  # 逗号分隔
    print("数据已保存为 output365.txt")

    # 计算差值找到最接近的索引来筛选纬度为30的数据
    # 目标值
@@ -113,7 +143,7 @@ def cosmic_planet_daily_process(

    def find_closest_row(group):
        return group.loc[group['Latitude_diff'].idxmin()]

    logging.info(f"final_df: {final_df.head()}")
    # 使用groupby按Day分组并找到每个分组的最接近的记录
    closest_per_day = final_df.groupby('Datetime_str', as_index=False).apply(
        find_closest_row, include_groups=False)
@@ -128,15 +158,18 @@ cosmic_planet_daily_process(

    def convert_to_days(datetime_str):
        # 获取日期、小时和分钟
        dd = int(datetime_str[:3])  # 日期部分 (DDD)
        hh = int(datetime_str[3:5])  # 小时部分 (HH)
        mm = int(datetime_str[5:])  # 分钟部分 (MM)
        try:
            dd = int(datetime_str[:3])  # 日期部分 (DDD)
            hh = int(datetime_str[3:5])  # 小时部分 (HH)
            mm = int(datetime_str[5:])  # 分钟部分 (MM)

        # 计算总分钟数
        total_minutes = dd * 1440 + hh * 60 + mm
            # 计算总分钟数
            total_minutes = dd * 1440 + hh * 60 + mm

        # 转换为天数
        return total_minutes / 1440
            # 转换为天数
            return total_minutes / 1440
        except Exception as e:
            logging.error(f"转换失败: {datetime_str}")

    # 应用函数转换
    try:
@@ -145,7 +178,7 @@ def cosmic_planet_daily_process(
    except Exception as e:
        print(e)
    # 时间t
    day_data = closest_per_day['Time']
    # day_data = closest_per_day['Time']
    # 计算背景温度,时间Day和弧度经度的函数
    background_temperature = closest_per_day['Temperature'].mean()
    # 计算温度扰动
@@ -61,6 +61,12 @@ def cosmic_planetw_plot_perday(
    x = np.array(df_8['Longitude_Radians'])  # 经度弧度制
    temperature = np.array(df_8['Temperature'])  # 温度,因变量

    # `tempeture` 为因变量,`x` 为自变量,`t` 为参数
    # tempeture is possible to contain NaNs, so we need to drop them
    mask = ~np.isnan(temperature)
    x = x[mask]
    temperature = temperature[mask]

    # 用T进行拟合
    popt, pcov = curve_fit(lambda x, *params: u_func(x, *params, t=t),
                           x, temperature, p0=initial_guess, bounds=bounds, maxfev=50000)
@@ -156,15 +156,15 @@ class SaberGravitywRenderer:
        # 原始信号的时间序列
        plt.subplot(1, 2, 1)
        plt.plot(x1[::-1], y, label='原始信号')
        plt.title('(a)Nz')
        plt.xlabel('势能', labelpad=10)  # 增加标签间距
        # plt.title('(a)Nz')
        plt.xlabel('浮力频率平方', labelpad=10)  # 增加标签间距
        plt.ylabel('高度 (km)', labelpad=10)  # 增加标签间距

        # 原始信号的时间序列
        plt.subplot(1, 2, 2)
        plt.plot(x2[::-1], y, label='原始信号')
        plt.title('(b)Ptz')
        plt.xlabel('浮力频率', labelpad=10)  # 增加标签间距
        # plt.title('(b)Ptz')
        plt.xlabel('重力势能', labelpad=10)  # 增加标签间距
        plt.ylabel('高度 (km)', labelpad=10)  # 增加标签间距

        # 调整子图之间的边距
@@ -3,6 +3,7 @@
# 此代码是对数据处理后的txt数据进行行星波参数提取绘图
# 2006_TIDI_V_Meridional_data.txt也可以做同样处理,一个是经向风提取的行星波,一个是纬向风的

import logging
import os
import pandas as pd
import numpy as np
@@ -22,7 +23,7 @@ matplotlib.rcParams['axes.unicode_minus'] = False  # 正常显示负号
# initial_guess = [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0] # v0, a1, b1, a2, b2, a3, b3
initial_guess = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
                 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]  # 9个 a 和 9个 b 参数

logging.basicConfig(level=logging.INFO)
# 设置参数界限
bounds = (
    [0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf,
@@ -31,8 +32,6 @@ bounds = (
     np.inf, np.inf, np.inf])  # 上界


# 定义拟合函数

def TidiPlotPlanetWDaily(
        mode,
        year,
@@ -53,12 +52,16 @@ def TidiPlotPlanetWDaily(
    data_cache_path = f'{DATA_BASEPATH.tidi}/{year}/TIDI_{mode}h{input_height}r{lat_range[0]}-{lat_range[1]}_data.parquet'
    # if cache exist, read from cache
    if os.path.exists(data_cache_path):
        logging.info(f'loading data from cache: {data_cache_path}')
        df = pd.read_parquet(data_cache_path)
        logging.info(df.head())
    else:
        # 读取一年的数据文件
        logging.info(f'loading data from raw data: {year}')
        df = extract_tidi_planetw_data(
            year, input_height, lat_range
        )[mode]
        df.to_parquet(data_cache_path)
        # df.to_parquet(data_cache_path)

    # 删除有 NaN 的行
    # V_Meridional
@@ -89,7 +92,8 @@ def TidiPlotPlanetWDaily(
        # 检查当前窗口的数据量
        if len(df_8) < min_data_points:
            # 输出数据量不足的警告
            print(f"数据量不足,无法拟合:{start_day} 到 {end_day},数据点数量:{len(df_8)}")
            logging.debug(
                f"数据量不足,无法拟合:{start_day} 到 {end_day},数据点数量:{len(df_8)}")
            # 将拟合参数设置为 NaN
            all_fit_results.append([np.nan] * 18)
            continue  # 跳过当前时间窗口,继续下一个窗口
@@ -111,6 +115,10 @@ def TidiPlotPlanetWDaily(
               'a5', 'b5', 'a6', 'b6', 'a7', 'b7', 'a8', 'b8', 'a9', 'b9']
    fit_df = pd.DataFrame(all_fit_results, columns=columns)  # fit_df即为拟合的参数汇总

    # 如果df 长度为0,说明没有数据,直接返回

    logging.info(fit_df.head())

    # -------------------------------画图----------------------------
    # a1-a9,对应波数-4、-3、-2、-1、0、1、2、3、4的行星波振幅
    a_columns = ['a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9']
@@ -122,8 +130,14 @@ def TidiPlotPlanetWDaily(
    # 获取索引并转换为 numpy 数组
    x_values = fit_df.index.to_numpy()

    # 对每一列生成独立的图
    col = k_to_a[f'k={k}']
    if fit_df[col].isna().all():
        # 画一张图,就写一行字,“数组不足以拟合”
        plt.figure(figsize=(8, 6))  # 创建新的图形
        plt.text(0.5, 0.5, '数组不足以拟合', fontsize=24, ha='center')
        plt.axis('off')
        return
    # 对每一列生成独立的图
    plt.figure(figsize=(8, 6))  # 创建新的图形
    plt.plot(x_values, fit_df[col].values)
    plt.title(f'k={k} 振幅图')
@@ -1,5 +1,6 @@
# 原始文件 TIDI行星波数据处理.py

import logging
import os
import numpy as np
from scipy.io import loadmat
@@ -26,6 +27,7 @@ def extract_tidi_planetw_data(
    if input_height not in HEIGHTS_CONSTANT:
        raise ValueError(f"input_height must be in {HEIGHTS_CONSTANT}")
    fixed_height_index = HEIGHTS_CONSTANT.index(input_height)
    logging.info(f"fixed_height_index: {fixed_height_index}")
    height_list = []
    lat_list = []
    lon_list = []
@@ -74,6 +76,7 @@ def extract_tidi_planetw_data(
        lat_df = pd.DataFrame(lat_data['Lat'])
        lon_df = pd.DataFrame(lon_data['Lon'])
        vmeridional_df = pd.DataFrame(vmeridional_data['VMerdional'])
        # logging.info(f"vmeridional_df: {vmeridional_df}")
        vzonal_df = pd.DataFrame(vzonal_data['Vzonal'])

        # 将每天的数据添加到列表中
@@ -91,7 +94,9 @@ def extract_tidi_planetw_data(
    lat_df = pd.concat(lat_list, ignore_index=True)
    lon_df = pd.concat(lon_list, ignore_index=True)
    vmeridional_df = pd.concat(vmeridional_list, axis=1)  # 按列合并
    logging.info(f"vmeridional_df shape: {vmeridional_df.shape}")
    vzonal_df = pd.concat(vzonal_list, axis=1)  # 按列合并
    logging.info(f"vzonal_df shape: {vzonal_df.shape}")

    # 初始化空列表来存储每天的数据
    UTime_list = []
@@ -138,6 +143,8 @@ def extract_tidi_planetw_data(
    fixed_height_lon = lon_df.iloc[:, 0].values
    # 获取固定高度下的经向风数据
    fixed_height_vmeridional = vmeridional_df.iloc[fixed_height_index, :].values
    logging.info(
        f"fixed_height_vmeridional shape: {fixed_height_vmeridional.shape}")
    # 获取固定高度下的纬向风数据
    fixed_height_vzonal = vzonal_df.iloc[fixed_height_index, :].values
    # 获取对应的世界时数据(这里直接使用整个UTime数据,你可以根据实际情况判断是否需要处理下格式等)
@@ -152,7 +159,7 @@ def extract_tidi_planetw_data(
    })

    # 简单打印下整合后的数据前几行查看下内容
    print(combined_data)
    print(combined_data["V_Meridional"].unique())

    # 确定纬度和经度的范围
    lat_min, lat_max = np.min(fixed_height_lat), np.max(fixed_height_lat)