Hello everyone, how are you?
I recently received some data in Cartesian format from EEC’s EDGE software to inspect. Normally I receive the volumetric data in polar coordinates and convert it to Cartesian myself, but this time it arrived already converted. I thought about how to process this in pyart and wrote the code below to import it. It seems to work, and I hope it helps anyone who runs into the same problem. Feel free to make any changes you deem necessary. I am also sharing some samples of the data here.
import numpy as np
import pyart
import cartopy.crs as ccrs
import netCDF4
from datetime import datetime
import matplotlib.pyplot as plt


def process_grid_edge(file_path, data_field):
    """
    Process a Cartesian EDGE NetCDF file into a Py-ART grid.

    Parameters:
    - file_path (str): Path to the NetCDF file.
    - data_field (str): Name of the data field (e.g., 'DBZH').

    Returns:
    - grid (pyart.core.Grid): Created grid object.
    """
    # Open the NetCDF file
    dset = netCDF4.Dataset(file_path, mode='r')
    try:
        # Clean up extra bytes and decode the time strings
        start_time_raw = dset.variables['time_coverage_start'][:].tobytes().decode('utf-8').strip('\x00')
        end_time_raw = dset.variables['time_coverage_end'][:].tobytes().decode('utf-8').strip('\x00')

        # Convert times to datetime objects
        start_time_dt = datetime.fromisoformat(start_time_raw.replace('Z', ''))
        end_time_dt = datetime.fromisoformat(end_time_raw.replace('Z', ''))

        # Calculate time in seconds since the start
        time_diff_seconds = (end_time_dt - start_time_dt).total_seconds()

        # Build the time dictionary
        time_dictionary = {
            'long_name': 'Time of grid',
            'units': f'seconds since {start_time_dt.isoformat()}Z',
            'standard_name': 'time',
            'calendar': 'gregorian',
            'data': np.ma.masked_array(data=[time_diff_seconds], mask=False, fill_value=1e+20),
        }

        # Retrieve the data for the desired variable and the radar origin
        data = dset.variables[data_field][:]
        origin_latitude = dset.variables['radar_latitude'][:].data
        origin_longitude = dset.variables['radar_longitude'][:].data
        origin_altitude = dset.variables['radar_altitude'][:].data

        # Retrieve x and y coordinates from the NetCDF file
        x_data = dset.variables['x'][:]
        y_data = dset.variables['y'][:]

        # Calculate limits centered around zero
        x_extent = np.abs(x_data.min())
        y_extent = np.abs(y_data.min())

        # Define z-axis limits based on the product title
        if dset.title == 'xyCMAX':
            z_limits = (0, 0)
        elif dset.title == 'xyCAPPI':
            z_value = float(dset.variables['cappi_height'][:].data)
            z_limits = (z_value, z_value)
        elif dset.title == 'xyBASE':
            z_limits = (0, 0)
        else:
            raise ValueError(f"Unrecognized NetCDF file title: {dset.title}")

        # Define the grid limits as (z, y, x), in meters, centered on the radar
        grid_limits = (
            z_limits,
            (-y_extent, y_extent),
            (-x_extent, x_extent),
        )

        # Expand dimensions to include time (temporal dimension)
        data = np.expand_dims(data, axis=0)

        # Define grid dimensions
        grid_shape = (1, data.shape[1], data.shape[2])

        # Create an empty grid using Py-ART
        grid = pyart.testing.make_empty_grid(grid_shape, grid_limits)
        grid.projection = {'proj': 'pyart_aeqd', '_include_lon_0_lat_0': True}

        # Configure origin parameters
        grid.origin_latitude = {'data': np.array([origin_latitude])}
        grid.origin_longitude = {'data': np.array([origin_longitude])}
        grid.origin_altitude = {'data': np.array([origin_altitude])}

        # Assign the time dictionary to the grid
        grid.time = time_dictionary

        # Mapping of EDGE variable names to Py-ART field metadata
        field_mapping = {
            'DBZH': {'name': 'reflectivity', 'units': 'dBZ', 'long_name': 'Reflectivity'},
            'DBZV': {'name': 'reflectivity_vertical', 'units': 'dBZ', 'long_name': 'Reflectivity'},
            'VELH': {'name': 'velocity', 'units': 'm/s', 'long_name': 'Radial Velocity'},
            'VELV': {'name': 'velocity_vertical', 'units': 'm/s', 'long_name': 'Radial Velocity'},
            'WIDTHH': {'name': 'spectrum_width', 'units': 'm/s', 'long_name': 'Spectrum Width'},
            'WIDTHV': {'name': 'spectrum_width_vertical', 'units': 'm/s', 'long_name': 'Spectrum Width'},
            'ZDR': {'name': 'differential_reflectivity', 'units': 'dB', 'long_name': 'Differential Reflectivity'},
            'RHOHV': {'name': 'correlation_coefficient', 'units': '', 'long_name': 'Correlation Coefficient'},
            'PHIDP': {'name': 'differential_phase', 'units': 'degrees', 'long_name': 'Differential Phase'},
            'KDP': {'name': 'specific_differential_phase', 'units': 'degrees/km', 'long_name': 'Specific Differential Phase'},
            'SNRHC': {'name': 'signal_to_noise_ratio_h', 'units': 'dB', 'long_name': 'Signal-to-Noise Ratio H'},
            'SNRVC': {'name': 'signal_to_noise_ratio_v', 'units': 'dB', 'long_name': 'Signal-to-Noise Ratio V'},
            'HMC': {'name': 'hydrometeor_classification', 'units': '', 'long_name': 'Hydrometeor Classification'},
            'RR': {'name': 'rain_rate', 'units': 'mm/h', 'long_name': 'Rain Rate'},
        }

        # Verify that the field is supported
        if data_field in field_mapping:
            field_info = field_mapping[data_field]
            grid.fields = {
                field_info['name']: {
                    'data': data,
                    'units': getattr(dset.variables[data_field], 'units', field_info['units']),
                    'long_name': getattr(dset.variables[data_field], 'long_name', field_info['long_name']),
                    'standard_name': getattr(dset.variables[data_field], 'standard_name', field_info['name']),
                    '_FillValue': getattr(dset.variables[data_field], '_FillValue', -9999.0),
                }
            }
        else:
            raise ValueError(f"Unsupported data field: {data_field}")

        return grid
    finally:
        # Close the NetCDF dataset
        dset.close()
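
For anyone who wants to test it quickly, here is a minimal usage sketch. The file name cmax_sample.nc is only a placeholder for one of the sample files mentioned above, and the vmin/vmax values are arbitrary display choices; the plotting and output just use the standard pyart.graph.GridMapDisplay and pyart.io.write_grid.

# Build the grid from one of the sample files (placeholder name) using the DBZH field
grid = process_grid_edge('cmax_sample.nc', 'DBZH')

# Quick-look plot with Py-ART's GridMapDisplay on a PlateCarree map
display = pyart.graph.GridMapDisplay(grid)
fig = plt.figure(figsize=(8, 7))
display.plot_grid('reflectivity', level=0, vmin=-10, vmax=65,
                  cmap='pyart_NWSRef', projection=ccrs.PlateCarree())
plt.show()

# Optionally, save the grid as a CF-style NetCDF file for later use
pyart.io.write_grid('cmax_grid_pyart.nc', grid)

Note that DBZH is mapped to the field name 'reflectivity' inside the function, so that is the name passed to plot_grid.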