Dustella 2025-02-19 11:10:08 +08:00
parent 2f0f3fe22f
commit 8fc63372f7
Signed by: Dustella
GPG Key ID: 35AA0AA3DC402D5C
7 changed files with 365 additions and 276 deletions

View File

@ -1,3 +1,4 @@
import resource
from matplotlib import pyplot as plt
import cosmic
import tidi
@ -13,9 +14,14 @@ import matplotlib.font_manager as fm
app = Quart(__name__)
app = cors(app, send_origin_wildcard=True, allow_origin="*")
# limit the whole app to at most 8 GB of RAM
# use the resource module to do this
# resource.setrlimit(resource.RLIMIT_AS, (8 * 1024 * 1024 * 1024, -1))
plt.switch_backend('agg')
fm.fontManager.addfont("./SimHei.ttf")
@app.before_request
def auth():
# check for method
@ -27,10 +33,12 @@ def auth():
if code != "0101":
return "Unauthorized", 401
@app.route("/ping")
def ping():
return "pong"
app.register_blueprint(balloon.balloon_module, url_prefix="/balloon")
app.register_blueprint(radar.radar_module, url_prefix="/radar")
app.register_blueprint(saber.saber_module, url_prefix="/saber")
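
The 8 GB memory cap above is left commented out. A minimal sketch of how it could be enabled on Linux, assuming the limit from the comment (resource.RLIMIT_AS is not available on Windows):

import resource

EIGHT_GB = 8 * 1024 * 1024 * 1024
# soft-limit the address space of the whole process; keep the hard limit unlimited
resource.setrlimit(resource.RLIMIT_AS, (EIGHT_GB, resource.RLIM_INFINITY))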

View File

@ -3,6 +3,8 @@ from io import BytesIO
from quart import Blueprint, request, send_file
from matplotlib import pyplot as plt
from CONSTANT import DATA_BASEPATH
from saber.archive.gravity_plot import do_gravity_plot
from saber.archive.gravity_process import process_gravity_data
from saber.process import DataProcessor
from saber.render import Renderer
from saber.utils import *
@ -19,6 +21,7 @@ saber_module = Blueprint('saber', __name__)
# lat_range=lat_range, alt_range=alt_range, lambda_range=lambda_range, lvboin=lvboin)
renderer = Renderer()
def get_processer():
lat_str = request.args.get("lat_range")
if lat_str is None:
@ -129,3 +132,16 @@ async def do_year_power_wave_plot():
])
await run_sync(renderer.year_power_wave_plot)(year_wave=data)
return await extract_payload()
@saber_module.route("/render/gravity_year")
async def do_gravity_year():
year = request.args.get("year")
T = request.args.get("T")
year = int(year)
T = int(T)
if T not in [5, 10, 16]:
raise ValueError("T must be in [5, 10, 16]")
data = await run_sync(process_gravity_data)(year)
await run_sync(do_gravity_plot)(data, T)
return await extract_payload()
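
A minimal sketch of exercising the new route, assuming the app is served locally, the auth() hook passes, and extract_payload() returns the rendered PNG as the other /render routes do (host and port are placeholders):

import requests

# the saber blueprint is mounted under /saber, so the full path is /saber/render/gravity_year
resp = requests.get(
    "http://localhost:5000/saber/render/gravity_year",
    params={"year": 2020, "T": 16},  # T must be 5, 10 or 16
)
with open("gravity_2020.png", "wb") as f:
    f.write(resp.content)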

View File

@ -11,7 +11,7 @@ import matplotlib
matplotlib.rcParams['font.sans-serif'] = ['SimHei']  # display Chinese text
matplotlib.rcParams['axes.unicode_minus'] = False  # display minus signs correctly
# read one year's data file
df = pd.read_csv(r'C:\pythonProject3\combined_data.txt', sep='\s+')
# df = pd.read_csv(r'C:\pythonProject3\combined_data.txt', sep='\s+')
# set initial parameters
# initial_guess = [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0] # v0, a1, b1, a2, b2, a3, b3
@ -26,6 +26,7 @@ bounds = (
np.inf, np.inf, np.inf])  # upper bounds
def do_gravity_plot(df: pd.DataFrame, T=16):
# define the fitting function
def u_func(x, *params):
a1, b1, a2, b2, a3, b3, a4, b4, a5, b5, a6, b6, a7, b7, a8, b8, a9, b9 = params
@ -41,7 +42,6 @@ def u_func(x, *params):
+ a9 * np.sin((2 * np.pi / T) * t + 4 * x + b9)
)
# list to store the fitted parameter results
all_fit_results = []
@ -51,7 +51,7 @@ min_data_points = 36
# fit over multiple time windows
# T should be 5, 10 or 16
T = 16  # set T; can be adjusted dynamically
# T = 16  # set T; can be adjusted dynamically
for start_day in range(0, 365-3*T):  # the last window is [351, 366]
end_day = start_day + 3 * T  # each window ends at start_day + 3*T
@ -94,13 +94,15 @@ k_to_a = {f'k={k}': a for k, a in zip(k_values, a_columns)}
# get the index and convert it to a numpy array
x_values = fit_df.index.to_numpy()
# generate a separate figure for each column
for k, col in k_to_a.items():
plt.figure(figsize=(8, 6))  # create a new figure
plt.plot(x_values, fit_df[col].values)
plt.title(f'{k} 振幅图')
plt.xlabel('Day')
plt.ylabel('振幅')
# create one figure containing multiple subplots
fig, axs = plt.subplots(len(k_to_a), 1, figsize=(10, 2 * len(k_to_a)))
# generate a separate subplot for each column
for ax, (k, col) in zip(axs, k_to_a.items()):
ax.plot(x_values, fit_df[col].values)
ax.set_title(f'{k} 振幅图')
ax.set_xlabel('Day')
ax.set_ylabel('振幅')
# dynamically adjust the x-axis ticks
adjusted_x_values = x_values + (3 * T + 1) / 2
@ -112,6 +114,10 @@ for k, col in k_to_a.items():
tick_positions = adjusted_x_values
tick_labels = [f'{int(val)}' for val in tick_positions]
plt.xticks(ticks=tick_positions, labels=tick_labels)
ax.set_xticks(tick_positions)
ax.set_xticklabels(tick_labels)
plt.show()  # show the figure
plt.tight_layout()
# plt.show()
# plt.show()  # show the figure
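
With plt.show() disabled and the app running the Agg backend, a minimal sketch of driving the refactored plot directly and saving the subplot figure to a buffer (the year is a placeholder; process_gravity_data is shown in the next file):

from io import BytesIO
from matplotlib import pyplot as plt
from saber.archive.gravity_process import process_gravity_data
from saber.archive.gravity_plot import do_gravity_plot

df = process_gravity_data(2020)
do_gravity_plot(df, T=10)          # T must be 5, 10 or 16
buffer = BytesIO()
plt.savefig(buffer, format="png")  # grabs the current (subplot) figure
buffer.seek(0)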

View File

@ -1,11 +1,13 @@
# The height and latitude can be specified to get data at that height/latitude; the height (line 101) is usually 70, 90 or 110, and the latitude (line 115) is usually -20, 30 or 60
import os
import netCDF4 as nc
import numpy as np
import netCDF4 as nc
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
import pandas as pd
from scipy.optimize import curve_fit
from scipy.optimize import least_squares
@ -23,6 +25,13 @@ def process_gravity_data(year):
base_path = DATA_BASEPATH.saber + \
"/{year}/SABER_Temp_O3_{month}{year}_v2.0.nc"
cache_path = DATA_BASEPATH.saber + \
f"/{year}/gravity_result.parquet"
if os.path.exists(cache_path):
# load from cache
return pd.read_parquet(cache_path)
# initialize empty arrays to store the concatenated data
tplongitude = None
tplatitude = None
@ -165,7 +174,12 @@ def process_gravity_data(year):
combined_data = np.column_stack(
(closest_longitudes_radians, closest_times, closest_temperatures))
return combined_data
df = pd.DataFrame(combined_data, columns=[
"Longitude_Radians", "Time", "Temperature"])
# dump to cache
df.to_parquet(cache_path)
return df
# save the data as a txt file
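
With the parquet cache added above, repeated calls for the same year are cheap; a minimal sketch, assuming DATA_BASEPATH.saber points at the existing SABER directory layout (the year is a placeholder):

from saber.archive.gravity_process import process_gravity_data

df = process_gravity_data(2020)  # first call: builds the DataFrame from the monthly .nc files and writes gravity_result.parquet
df = process_gravity_data(2020)  # second call: read straight from the parquet cache
print(df.columns.tolist())       # ['Longitude_Radians', 'Time', 'Temperature']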

View File

@ -11,6 +11,7 @@ from tidi.staged.plot import tidi_render
tidi_module = Blueprint("Tidi", __name__)
@tidi_module.route('/metadata')
def get_all_years():
res = glob.glob(f"{DATA_BASEPATH.tidi}/**/**.txt", recursive=True)
@ -20,23 +21,30 @@ def get_all_years():
"path": res
}
@tidi_module.route('/render/wave')
async def render_wave():
mode = request.args.get('mode')
year = request.args.get('year')
k = request.args.get('k')
T = request.args.get('T')
height = request.args.get('height')
lat_range = request.args.get('lat_range')
# like `0 ~ 5`
year = int(year)
k = int(k)
T = int(T)
height = float(height)
lat_range = tuple(map(int, lat_range.split('~')))
await run_sync(tidi_render)(mode, year, k, T)
await run_sync(tidi_render)(mode, year, k, height, lat_range, T)
buffer = BytesIO()
plt.savefig(buffer, format="png")
buffer.seek(0)
return await send_file(buffer, mimetype="image/png")
@tidi_module.route('/render/month_stats_v1')
async def render_stats_v1():
year = request.args.get('year')
@ -49,6 +57,7 @@ async def render_stats_v1():
buffer.seek(0)
return await send_file(buffer, mimetype="image/png")
@tidi_module.route('/render/month_stats_v2')
async def render_stats_v2():
year = request.args.get('year')
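
A minimal sketch of calling the extended wave endpoint, assuming the tidi blueprint is mounted under /tidi and the server runs locally (both are assumptions not shown in this diff):

import requests

resp = requests.get(
    "http://localhost:5000/tidi/render/wave",
    params={
        "mode": "V_Zonal",     # or V_Meridional
        "year": 2015,
        "k": 1,                # wavenumber, must be in range(-4, 5)
        "T": 10,               # wave period in days: 5, 10 or 16
        "height": 82.5,        # one of the 2.5 km levels between 70 and 120 km
        "lat_range": "0 ~ 5",  # parsed with split('~'); must span 5°
    },
)
with open("wave.png", "wb") as f:
    f.write(resp.content)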

View File

@ -3,6 +3,7 @@
# This code extracts planetary-wave parameters from the processed txt data and plots them
# 2006_TIDI_V_Meridional_data.txt can be processed the same way; one file gives the planetary wave extracted from the meridional wind, the other from the zonal wind
import os
import pandas as pd
import numpy as np
from scipy.optimize import curve_fit
@ -12,25 +13,34 @@ import matplotlib.pyplot as plt
import matplotlib
from CONSTANT import DATA_BASEPATH
from tidi.staged.process import tidi_process_idk
# configure Chinese text and minus-sign rendering
matplotlib.rcParams['font.sans-serif'] = ['SimHei']  # display Chinese text
matplotlib.rcParams['axes.unicode_minus'] = False  # display minus signs correctly
# read one year's data file
# set initial parameters
# initial_guess = [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0] # v0, a1, b1, a2, b2, a3, b3
initial_guess = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]  # 9 a and 9 b parameters
initial_guess = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]  # 9 a and 9 b parameters
# set parameter bounds
bounds = (
[0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf],  # lower bounds
[0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf,
0, -np.inf, 0, -np.inf, 0, -np.inf, 0, -np.inf],  # lower bounds
[np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.inf,
np.inf, np.inf, np.inf])  # upper bounds
# define the fitting function
def tidi_render(mode, year, k, T=10):
def tidi_render(
mode,
year,
k,
input_height,
lat_range=(0, 5),
T=10
):
# list to store the fitted parameter results
all_fit_results = []
@ -40,13 +50,23 @@ def tidi_render(mode, year, k, T=10):
raise ValueError('mode must be V_Zonal or V_Meridional')
if k not in list(range(-4, 5)):
raise ValueError('k must be in range(-4, 5)')
df = pd.read_csv(f'{DATA_BASEPATH.tidi}/{year}/TIDI_{mode}_data.txt', sep='\s+')
data_cache_path = f'{DATA_BASEPATH.tidi}/{year}/TIDI_{mode}h{input_height}r{lat_range[0]}-{lat_range[1]}_data.parquet'
# if cache exist, read from cache
if os.path.exists(data_cache_path):
df = pd.read_parquet(data_cache_path)
else:
df = tidi_process_idk(
year, input_height, lat_range
)[mode]
df.to_parquet(data_cache_path)
# drop rows containing NaN
# V_Meridional
df = df.dropna(subset=[mode])
# fit over multiple time windows
# T should be 5, 10 or 16
# TODO: for the 5-day wave, use a 15-day sliding window
def u_func(x, *params):
a1, b1, a2, b2, a3, b3, a4, b4, a5, b5, a6, b6, a7, b7, a8, b8, a9, b9 = params
return (
@ -80,13 +100,15 @@ def tidi_render(mode, year, k, T=10):
temperature = np.array(df_8[mode])  # wind-speed component, the dependent variable
# fit using T
popt, pcov = curve_fit(u_func, x, temperature, p0=initial_guess, bounds=bounds, maxfev=50000)
popt, pcov = curve_fit(u_func, x, temperature,
p0=initial_guess, bounds=bounds, maxfev=50000)
# append the fit result to the list
all_fit_results.append(popt)
# convert the results to a DataFrame
columns = ['a1', 'b1', 'a2', 'b2', 'a3', 'b3', 'a4', 'b4', 'a5', 'b5', 'a6', 'b6', 'a7', 'b7', 'a8', 'b8', 'a9', 'b9']
columns = ['a1', 'b1', 'a2', 'b2', 'a3', 'b3', 'a4', 'b4',
'a5', 'b5', 'a6', 'b6', 'a7', 'b7', 'a8', 'b8', 'a9', 'b9']
fit_df = pd.DataFrame(all_fit_results, columns=columns)  # fit_df is the summary of fitted parameters
# ------------------------------- plotting ----------------------------
@ -122,6 +144,7 @@ def tidi_render(mode, year, k, T=10):
# plt.show()  # show the figure
if __name__ == '__main__':
tidi_render('V_Zonal', 2015, 1)
tidi_render('V_Meridional', 2015, 1, 82.5)  # input_height is now required; 82.5 km matches the processor's default
@ -203,7 +226,3 @@ if __name__ == '__main__':
# # show the figure
# plt.tight_layout()
# plt.show()
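
For reference, a minimal sketch of calling tidi_render directly with the new signature (year, height and latitude band are placeholder values):

from matplotlib import pyplot as plt
from tidi.staged.plot import tidi_render

# mode, year, k, input_height, lat_range, T -- input_height is now required
tidi_render('V_Zonal', 2015, 1, 82.5, lat_range=(0, 5), T=10)
plt.savefig('tidi_wave.png')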

View File

@ -8,8 +8,22 @@ from CONSTANT import DATA_BASEPATH
# The latitude (line 156) can be chosen freely, but always in 5° bins
# 70 km to 120 km in 2.5 km steps, 21 levels in total; the sixth is 82.5 km. The height can also be chosen freely
fixed_height_index = 6
HEIGHTS_CONSTANT = [
70, 72.5, 75, 77.5, 80, 82.5, 85, 87.5, 90, 92.5, 95, 97.5, 100, 102.5, 105, 107.5, 110, 112.5, 115, 117.5, 120
]
def tidi_process_idk(
year,
input_height: float = 82.5,
lat_range: tuple = (0, 5),
):
# initialize empty lists to store each day's data
if lat_range[1] - lat_range[0] != 5:
raise ValueError("lat_range must be 5°")
if input_height not in HEIGHTS_CONSTANT:
raise ValueError(f"input_height must be in {HEIGHTS_CONSTANT}")
fixed_height_index = HEIGHTS_CONSTANT.index(input_height)
height_list = []
lat_list = []
lon_list = []
@ -17,7 +31,10 @@ vmeridional_list = []
vzonal_list = []
# file path
base_path = f"{DATA_BASEPATH.tidi}/2022/"
base_path = f"{DATA_BASEPATH.tidi}/{year}/"
# if the directory does not exist, raise an error
if not os.path.exists(base_path):
raise FileNotFoundError(f"{base_path} not found")
# loop over and read the data for day 1 through day 365
for day in range(1, 366):  # 365 days of data; ensures 365 iterations
@ -46,7 +63,8 @@ for day in range(1, 366): # 365天数据确保循环次数为365
height_data = loadmat(os.path.join(base_path, height_file))
lat_data = loadmat(os.path.join(base_path, lat_file))
lon_data = loadmat(os.path.join(base_path, lon_file))
vmeridional_data = loadmat(os.path.join(base_path, vmeridional_file))
vmeridional_data = loadmat(
os.path.join(base_path, vmeridional_file))
vzonal_data = loadmat(os.path.join(base_path, vzonal_file))
# convert the data to DataFrames
@ -112,7 +130,6 @@ UTime_df = pd.concat(UTime_list, ignore_index=True)
# convert UTime_df from hours to days by dividing by 24
UTime_df = UTime_df / 24
# get the latitude data at the fixed height
fixed_height_lat = lat_df.iloc[:, 0].values
# get the longitude data at the fixed height
@ -149,13 +166,18 @@ for i in range(len(fixed_height_lat)):
lat_idx = np.digitize(fixed_height_lat[i], lat_bins) - 1  # find the latitude grid index
lon_idx = np.digitize(fixed_height_lon[i], lon_bins) - 1  # find the longitude grid index
grid_idx = lat_idx * 12 + lon_idx  # compute the unique grid index (9x12=108)
grid_data.append([fixed_height_lat[i], fixed_height_lon[i], fixed_height_vmeridional[i], fixed_height_vzonal[i], fixed_height_utime[i], grid_idx])
grid_data.append([fixed_height_lat[i], fixed_height_lon[i], fixed_height_vmeridional[i],
fixed_height_vzonal[i], fixed_height_utime[i], grid_idx])
# convert the grid data to a DataFrame
grid_df = pd.DataFrame(grid_data, columns=['Latitude', 'Longitude', 'V_Meridional', 'V_Zonal', 'UTime', 'Grid_Index'])
grid_df = pd.DataFrame(grid_data, columns=[
'Latitude', 'Longitude', 'V_Meridional', 'V_Zonal', 'UTime', 'Grid_Index'])
# print the first few rows of the grid data for inspection
print(grid_df.head())
# keep only data whose latitude lies within the selected range
filtered_df = grid_df[(grid_df['Latitude'] >= 0) & (grid_df['Latitude'] <= 5)]
lat_range_down, lat_range_upper = lat_range
filtered_df = grid_df[(grid_df['Latitude'] >= lat_range_down) &
(grid_df['Latitude'] <= lat_range_upper)]
# convert longitude from degrees to radians
filtered_df['Longitude_Radians'] = np.radians(filtered_df['Longitude'])
# select the columns to output
@ -163,26 +185,21 @@ output_columns = ['Longitude_Radians', 'UTime', 'V_Zonal']
# write the data to a txt file
output_file = base_path+'TIDI_V_Zonal_data.txt'  # zonal wind component
filtered_df[output_columns].to_csv(output_file, sep='\t', index=False)
# print confirmation
print(f"数据已保存到 {output_file}")
zonal_data = filtered_df[output_columns]
# select the columns to output
output_columns1 = ['Longitude_Radians', 'UTime', 'V_Meridional']
# write the data to a txt file
output_file1 = base_path+'TIDI_V_Meridional_data.txt'  # meridional wind component
filtered_df[output_columns1].to_csv(output_file1, sep='\t', index=False)
# print confirmation
print(f"数据已保存到 {output_file1}")
merdi_data = filtered_df[output_columns1]
return {
"V_Zonal": zonal_data,
"V_Meridional": merdi_data
}
# output_file1 = base_path+'TIDI_V_Meridional_data.txt'  # meridional wind component
# final_result.to_csv(output_file1, sep='\t', index=False)
# # print confirmation
# print(f"数据已保存到 {output_file1}")
# # group the grid data by Grid_Index and compute statistics for each grid cell
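
A minimal sketch of using the reworked processor on its own, assuming the per-day .mat files exist under DATA_BASEPATH.tidi/<year>/ (year, height and latitude band are placeholders):

from tidi.staged.process import tidi_process_idk

frames = tidi_process_idk(2015, input_height=82.5, lat_range=(0, 5))
zonal = frames["V_Zonal"]            # columns: Longitude_Radians, UTime, V_Zonal
meridional = frames["V_Meridional"]  # columns: Longitude_Radians, UTime, V_Meridional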