Module ephemeris_library.frame_translation
import ephemeris_library.common as common
import ephemeris_library.rotation_libraries.lib_epop_geopack as lib_epop_geopack
import ephemeris_library.rotation_libraries.lib_j2k_geo_rotation as lib_j2k_geo_rotation
import ephemeris_library.rotation_libraries.lib_quaternion as lib_quaternion
import numpy
from numpy.lib import recfunctions
from typing import Union
AVAILABLE_FRAMES_BY_NAME = ["Geodetic", "Geomagnetic", "Geocentric Solar Magnetospheric", "Geocentric Solar Wind", \
"Geocentric Solar Ecliptic", "Solar Magnetospheric", "Magnetic Local Time", "Inertial"]
AVAILABLE_FRAMES = ['geod', 'mag', 'gsm', 'gsw', 'gse', 'sm', 'mlt', 'j2k']
def add_frame( data,
frame_list: Union[str, list],
cartesian: bool = True,
spherical: bool = False ):
'''
Rotate the given Geographic ephemeris data into the requested frame(s).
Available frames:
Acronyms: geod, mag, gsm, gsw, gse, sm, mlt, j2k
Full Name: Geodetic, Geomagnetic, Geocentric Solar Magnetospheric,
Geocentric Solar Wind, Geocentric Solar Ecliptic, Solar Magnetospheric,
Magnetic Local Time, Inertial
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & geo_[x|y|z] columns
frame_list: a string, or list of strings, corresponding to the frame the data is to be rotated into.
Must be bound within either of the 'Available frames' lists above.
cartesian: boolean. Default True.
Return the dataset in Cartesian coordinates.
spherical: boolean. Default False.
Return the dataset in Spherical coordinates, including the GEO.
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
Cartesian Names:
names: [<frame>_x, <frame>_y, <frame>_z] (Eg: 'gsm_x')
units: [meters, meters, meters]
Spherical Names:
names: [<frame>_lat, <frame>_lon, <frame>_alt]
units: [degrees, degrees, meters geocentric ]
geod:
names: ['geod_lat', 'geod_lon', 'geod_alt' ]
units: [ degrees, degrees, meters geodetic ]
mlt:
names: ['mlt_hour']
unit: [ hours ]
Note:
Geodetic rotations disregard the 'cartesian/spherical' argument, returning all columns.
Translation follows these chains:
geo->j2k
->geod
->mag->mlt
->gsm->gsw
->gse
->sm
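Example (illustrative call, assuming 'data' already holds 'met' and geo_x/y/z):
data = add_frame(data, ['gsm', 'mlt'], cartesian=True, spherical=True)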
'''
# Sanity checks:
# Data structure must include 'met', 'geo_x', 'geo_y', 'geo_z'
if any([frame not in data.dtype.names for frame in ['met', 'geo_x','geo_y','geo_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'met','geo_x/y/z' columns!")
# If given an empty list, None, or an empty string, raise a ValueError
if frame_list is None or frame_list == '' or len(frame_list) == 0:
raise ValueError("Given an invalid entry for the frame_list! Please give a string or list corresponding to the available frames!")
# Make sure at least one of the cart/sph options are chosen.
if not cartesian and not spherical:
raise ValueError("Asked to return neither Cartesian nor Spherical coordinates! Please set at least one to True.")
# If the frame-list is a string, cast to list.
if isinstance( frame_list, str ):
frame_list = [frame_list]
# Then translate the frames to their acronyms as needed.
frame_list = [ _decode_frame(frame) for frame in frame_list ]
# _decode_frame does the value-checking as well.
#############
# Derive the list of desired output columns, so any intermediate solutions can be filtered away later.
output_columns = list(data.dtype.names)  # Existing _x/y/z/lat/lon/alt entries.
for frame in frame_list:
# Special cases: geod, mlt.
if frame == 'geod':
if 'geod_lat' not in output_columns: output_columns.append('geod_lat')
if 'geod_lon' not in output_columns: output_columns.append('geod_lon')
if 'geod_alt' not in output_columns: output_columns.append('geod_alt')
continue # It's independent of the cart/sph names.
if frame == 'mlt':
if 'mlt_hour' not in output_columns: output_columns.append('mlt_hour')
continue
# And then append the cart/sph column names.
if cartesian == True:
if frame+"_x" not in output_columns: output_columns.append(frame+"_x")
if frame+"_y" not in output_columns: output_columns.append(frame+"_y")
if frame+"_z" not in output_columns: output_columns.append(frame+"_z")
if spherical == True:
if frame+"_lat" not in output_columns: output_columns.append(frame+"_lat")
if frame+"_lon" not in output_columns: output_columns.append(frame+"_lon")
if frame+"_alt" not in output_columns: output_columns.append(frame+"_alt")
######
# Add any intermediate dependencies as needed.
if 'sm' in frame_list and 'gsm' not in frame_list: frame_list += ['gsm']
if 'gse' in frame_list and 'gsm' not in frame_list: frame_list += ['gsm']
if 'gsw' in frame_list and 'gsm' not in frame_list: frame_list += ['gsm']
if 'mlt' in frame_list and 'mag' not in frame_list: frame_list += ['mag']
######
# Initialize the IGRF data
lib_epop_geopack.init_igrf()
# Translate the MET to Universal Time (Unix time)
ut = data['met'] - 50716800
# Recalculate constants for the given times:
lib_epop_geopack.recalc( ut )
# And do processing as needed.
# Make sure to follow the dependency chain.
#geo->geod
# ->mag
# ->gsm->gsw
# ->gse
# ->sm
# Create the cartesian coordinates regardless of request.
if 'geod' in frame_list:
data = _add_geodetic( data )
if 'mag' in frame_list:
data = _add_geomagnetic( data )
if 'gsm' in frame_list:
data = _add_geocentric_solar_magnetospheric( data )
if 'gse' in frame_list:
data = _add_geocentric_solar_ecliptic( data )
if 'gsw' in frame_list:
data = _add_geocentric_solar_wind( data )
if 'sm' in frame_list:
data = _add_solar_magnetospheric( data )
if 'mlt' in frame_list:
data = _add_magnetic_local_time( data )
if 'j2k' in frame_list:
data = _add_inertial_frame( data )
# And only produce the Spherical if requested.
if spherical == True:
for frame in frame_list:
# Skip Geodetic and MLT, as they're special cases handled above.
if frame in ['geod', 'mlt']: continue
# Otherwise, add the spherical coordinates as requested.
data = _add_spherical( data, frame )
#####
# And then filter any columns that aren't in either the given struct or the requested struct.
data = data[ output_columns ]
# And then make sure to return a nominal copy, rather than a view.
if data.base is not None:
data = data.copy()
# Occasionally, the dataset will be built with non-linear writes, which are slow to access.
# Repack the dataset to guarantee the output format is consistent.
data = recfunctions.repack_fields(data)
return data
def add_nadir_solar_angles( data ):
'''
Adds solar angle information based on the ORF/Nadir frame.
Requires J2K ephemeris, as velocity information is needed to derive the orbital plane.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & j2k_[x|y|z|vx|vy|vz] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
Names: sun_zenith, sun_azimuth, sun_beta
Units: degrees, degrees, degrees
'''
# Sanity checks:
if any([in_frame not in data.dtype.names for in_frame in ['j2k_x','j2k_y','j2k_z','j2k_vx','j2k_vy','j2k_vz'] ]):
raise ValueError("Given data object does not contain one of the requisite 'j2k_x/y/z/vx/vy/vz' columns!")
##############
# All values are processed off of the data['met'] column, so run the recalc function preemptively.
lib_epop_geopack.recalc( data['met'] - 50716800 )
# And pre-calculate the Sun-Vector in the J2K frame.
# Use the GSM definition of X = Sun_Vector to simplify the conversions:
sun_vector_gsm = numpy.full( ( len(data) ,3), [1.,0.,0.]) # An Nx3 array of <1,0,0>.
sun_x_gsm, sun_y_gsm, sun_z_gsm = sun_vector_gsm.T
# Translate to GEO, then to J2K.
sun_x_geo, sun_y_geo, sun_z_geo = lib_epop_geopack.geogsm( sun_x_gsm, sun_y_gsm, sun_z_gsm, -1 )
sun_x_j2k, sun_y_j2k, sun_z_j2k = lib_j2k_geo_rotation.GEO_to_J2K( data['met'], sun_x_geo, sun_y_geo, sun_z_geo )
# And merge the J2K-Frame sun-vector for processing:
sun_vector_j2k = numpy.column_stack(( sun_x_j2k, sun_y_j2k, sun_z_j2k ))
# And also extract the Nx3 arrays of position & velocity in J2K, as they're needed as a block.
# This also makes the NumPy processing a lot simpler.
_position_j2k = numpy.column_stack(( data['j2k_x'], data['j2k_y'], data['j2k_z'] ))
_velocity_j2k = numpy.column_stack(( data['j2k_vx'], data['j2k_vy'], data['j2k_vz'] ))
###################
# Solar Beta Angle:
# Derive the orbit-normal from the J2K ephemeris data.
orbit_normal_j2k = numpy.cross( _position_j2k, _velocity_j2k )
# And normalize the orbit-normal vector.
orbit_normal_j2k = (orbit_normal_j2k.T / numpy.linalg.norm(orbit_normal_j2k, axis=1)).T
# And calculate the sine of the angle between the orbit normal and the sun vector (the solar beta angle).
solar_beta_angle = numpy.arcsin( numpy.sum(orbit_normal_j2k * sun_vector_j2k, axis=1) ) * 180/numpy.pi
######
# Solar Zenith & Azimuth Angles:
# Summary: these angles are easiest to express in the Nadir/CRF frame, so first translate all vectors into it.
# Zenith angle is the angle off of the -Z axis (Zenith)
# Azimuth is the angle off of the +X axis. (Ram)
# Reuse the J2K-frame sun vector (sun_vector_j2k) computed above.
# Using the position & Velocity data values, derive the Nadir/CRF frame.
# +Z = Nadir = -Position
# +Y = +Z cross Velocity
# +X = +Y cross +Z
Nadir_Z = - _position_j2k
Nadir_Y = numpy.cross( Nadir_Z, _velocity_j2k )
Nadir_X = numpy.cross( Nadir_Y, Nadir_Z )
# Normalize all the vectors:
Nadir_X = (Nadir_X.T / numpy.linalg.norm(Nadir_X, axis=1)).T
Nadir_Y = (Nadir_Y.T / numpy.linalg.norm(Nadir_Y, axis=1)).T
Nadir_Z = (Nadir_Z.T / numpy.linalg.norm(Nadir_Z, axis=1)).T
# Then merge these basis vectors side-by-side: numpy.dstack stacks the Nx3 arrays along a third axis,
# giving N 3x3 CRF/Nadir->J2K rotation matrices whose columns are the CRF axes expressed in J2K.
crf_to_j2k_rotation_matrix = numpy.dstack(( Nadir_X, Nadir_Y, Nadir_Z ))
# Rather than batch matrix-vector products in NumPy, use the lib_quaternion quaternion mathematics to rotate all the vectors.
crf_to_j2k_quaternions = lib_quaternion.Rotation_Matrix_to_Quaternion( crf_to_j2k_rotation_matrix )
# And conjugate it to get J2K->CRF
j2k_to_crf_quaternions = lib_quaternion.Conjugate( crf_to_j2k_quaternions )
# And use the vector-rotate to get everything in the CRF frame.
sun_vector_crf = lib_quaternion.Vector_Rotate( j2k_to_crf_quaternions, sun_vector_j2k )
sun_x_crf, sun_y_crf, sun_z_crf = sun_vector_crf.T
# And from there, the Solar-Zenith angle is just the angle from the -Z vector:
solar_zenith_angle = 90 - numpy.arcsin( -sun_z_crf ) * 180/numpy.pi
# And the solar-Azimuth is just the angle off of the +X
solar_azimuth_angle = numpy.arctan2( sun_y_crf, sun_x_crf ) * 180/numpy.pi
# First remove the output columns, if already present:
data = data[[ name for name in data.dtype.names if name not in ["sun_zenith","sun_azimuth","sun_beta"]]]
# And append to the given dataset.
_names = ["sun_zenith","sun_azimuth","sun_beta"]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [solar_zenith_angle, solar_azimuth_angle, solar_beta_angle], dtypes=_types, usemask=False)
# And make sure that the returned dataset is its own array, and not a view.
if data.base is not None:
data = data.copy()
return data
def _add_frame_geo( data ):
'''
Rotate the given Inertial/Celestial ephemeris data into the Geographic frame.
Separated from 'add_frame' to maintain the 'geo'-only dependency.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & j2k_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
names: ['geo_x', 'geo_y', 'geo_z' ]
units: [ meters, meters, meters ]
dtype: [float64, float64, float64]
'''
# Sanity checks:
# Data structure must include 'met', 'j2k_x', 'j2k_y', 'j2k_z'
if any([frame not in data.dtype.names for frame in ['met', 'j2k_x','j2k_y','j2k_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'met','j2k_x/y/z' columns!")
data = _add_geographic_frame( data )
return data
### GEO-dependent frames:
def _add_geodetic( data ):
'''
Append the Geodetic position information to the given dataset.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & geo_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['geod_lat', 'geod_lon', 'geod_alt' ]
units: [ degrees, degrees, meters above earth (geodetic) ]
'''
# Check that all 'geo_x/y/z' contained within the given data:
if any([in_frame not in data.dtype.names for in_frame in ['geo_x','geo_y','geo_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'geo_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['geod_lat', 'geod_lon', 'geod_alt'] ]):
data = data[[ column for column in data.dtype.names if column not in ['geod_lat', 'geod_lon', 'geod_alt'] ]]
# lib_epop_geopack geodgeo() requires args given in non-meter values:
# r: Geocentric distance in kilometers
# theta: Spherical co-latitude in radians.
# Colatitude = 90 degrees - Latitude (e.g. North Pole = 0, South Pole = 180)
geocentric_altitude = ( data['geo_x']**2 + data['geo_y']**2 + data['geo_z']**2 )**0.5 #METERS.
geocentric_colatitude = numpy.arccos( data['geo_z'] / geocentric_altitude ) #RADIANS
# The colatitude must be fed in radians, which it already is.
# And translate the meters to kilometers.
geocentric_altitude /= 1000 # Meters->Kilometers.
geod_h, geod_xmu = lib_epop_geopack.geodgeo( geocentric_altitude, geocentric_colatitude, -1 )
# The return values also use geopack units:
# h = Geodetic altitude, in kilometers.
# xmu = Geodetic latitude, in radians (normal latitude, not colatitude).
# Translate the return values to meters & degrees.
geod_alt = geod_h * 1000
geod_lat = geod_xmu * 180/numpy.pi
# Geodetic longitude using the WGS84 model == Geocentric longitude.
geod_lon = numpy.arctan2( data['geo_y'], data['geo_x'] ) * 180/numpy.pi
# And append to the given dataset.
_names = ['geod_lat', 'geod_lon', 'geod_alt' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [geod_lat, geod_lon, geod_alt], dtypes=_types, usemask=False)
return data
def _add_geomagnetic( data ):
'''
Append the geomagnetic position information to the given dataset.
Frame definition:
Y: Through intersection of magnetic equator and geographic meridian 90 degrees east of the meridian containing the dipole axis
Z: Along the Dipole axis.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & geo_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['mag_x', 'mag_y', 'mag_z']
units: [meters, meters, meters]
'''
# Check that all 'geo_x/y/z' contained within the given data:
if any([frame not in data.dtype.names for frame in ['geo_x','geo_y','geo_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'geo_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['mag_x', 'mag_y', 'mag_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['mag_x', 'mag_y', 'mag_z']]]
# Simple cartesian rotation, so can just feed data directly.
mag_x, mag_y, mag_z = lib_epop_geopack.geomag( data['geo_x'], data['geo_y'], data['geo_z'], 1 )
# And then add the new columns to the dataset:
_names = ['mag_x', 'mag_y', 'mag_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [mag_x, mag_y, mag_z], _types, usemask=False)
return data
def _add_geocentric_solar_magnetospheric( data ):
'''
Append the GSM/Geocentric Solar Magnetospheric position information to the given dataset.
Frame Definition:
X: Towards the sun
Z: Projection of the dipole axis in the Y-Z GSE plane.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & geo_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['gsm_x', 'gsm_y', 'gsm_z' ]
units: [ meters, meters, meters ]
'''
# Check that all 'geo_x/y/z' contained within the given data:
if any([frame not in data.dtype.names for frame in ['geo_x','geo_y','geo_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'geo_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['gsm_x', 'gsm_y', 'gsm_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['gsm_x', 'gsm_y', 'gsm_z']]]
# Simple cartesian rotation, so can just feed data directly.
gsm_x, gsm_y, gsm_z = lib_epop_geopack.geogsm( data['geo_x'], data['geo_y'], data['geo_z'], 1 )
# And then add the new columns to the dataset:
_names = ['gsm_x', 'gsm_y', 'gsm_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [gsm_x, gsm_y, gsm_z], _types, usemask=False)
return data
#####
# Non-GEO dependant functions:
def _add_geocentric_solar_ecliptic( data ):
'''
Append the GSE/Geocentric Solar Ecliptic position information to the given dataset.
Frame Definition:
X: Towards the sun
Y: Opposite to the Earth's orbital motion.
Z: Perpendicular to the ecliptic plane
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & gsm_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['gse_x', 'gse_y', 'gse_z' ]
units: [ meters, meters, meters ]
'''
# Check that all 'gsm_x/y/z' columns are contained within the given data:
if any([frame not in data.dtype.names for frame in ['gsm_x','gsm_y','gsm_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'gsm_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['gse_x', 'gse_y', 'gse_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['gse_x', 'gse_y', 'gse_z']]]
# Simple cartesian rotation, so can just feed data directly.
gse_x, gse_y, gse_z = lib_epop_geopack.gsmgse( data['gsm_x'], data['gsm_y'], data['gsm_z'], 1 )
# And then add the new columns to the dataset:
_names = ['gse_x', 'gse_y', 'gse_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [gse_x, gse_y, gse_z], _types, usemask=False)
return data
def _add_geocentric_solar_wind( data ):
'''
Append the GSW/Geocentric Solar Wind position information to the given dataset.
Equivalent to the GSM frame when the solar wind velocity is taken to have
Vy=0, Vz=0.
Frame Definition:
X: Towards the sun
Z: Projection of the dipole axis in the Y-Z GSE plane.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & gsm_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['gsw_x', 'gsw_y', 'gsw_z' ]
units: [ meters, meters, meters ]
'''
# Check that all 'gsm_x/y/z' columns are contained within the given data:
if any([frame not in data.dtype.names for frame in ['gsm_x','gsm_y','gsm_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'gsm_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['gsw_x', 'gsw_y', 'gsw_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['gsw_x', 'gsw_y', 'gsw_z']]]
# Simple cartesian rotation, so can just feed data directly.
gsw_x, gsw_y, gsw_z = lib_epop_geopack.gswgsm( data['gsm_x'], data['gsm_y'], data['gsm_z'], -1 )
# And then add the new columns to the dataset:
_names = ['gsw_x', 'gsw_y', 'gsw_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [gsw_x, gsw_y, gsw_z], _types, usemask=False)
return data
def _add_solar_magnetospheric( data ):
'''
Append the SM/Solar Magnetospheric position information to the given dataset.
Frame Definition:
X: In the plane containing the Earth-Sun line and the dipole axis
Z: Along the dipole axis
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & gsm_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['sm_x', 'sm_y', 'sm_z']
units: [ meters, meters, meters ]
'''
# Check that all 'gsm_x/y/z' columns are contained within the given data:
if any([frame not in data.dtype.names for frame in ['gsm_x','gsm_y','gsm_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'gsm_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['sm_x', 'sm_y', 'sm_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['sm_x', 'sm_y', 'sm_z']]]
# Simple cartesian rotation, so can just feed data directly.
sm_x, sm_y, sm_z = lib_epop_geopack.smgsm( data['gsm_x'], data['gsm_y'], data['gsm_z'], -1 )
# And then add the new columns to the dataset:
_names = ['sm_x', 'sm_y', 'sm_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [sm_x, sm_y, sm_z], _types, usemask=False)
return data
def _add_magnetic_local_time( data ):
'''
Append the MLT/Magnetic Local Time hour-angle information to the given dataset.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & mag_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['mlt_hour']
units: [ hour-angle ]
'''
if any([frame not in data.dtype.names for frame in ['mag_x','mag_y'] ]):
raise ValueError("Given data object does not contain one of the requisite 'mag_x/y' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing.
if any([out_frame in data.dtype.names for out_frame in ['mlt_hour'] ]):
data = data[[ column for column in data.dtype.names if column not in ['mlt_hour']]]
# Dependent on the pseudo-sun vector in the SM/Solar Magnetospheric frame.
sun_sm_x, sun_sm_y, sun_sm_z = 1, 0, 0 # Note: not the actual sun-vector.
sun_gsm_x, sun_gsm_y, sun_gsm_z = lib_epop_geopack.smgsm( sun_sm_x, sun_sm_y, sun_sm_z, -1 )
sun_mag_x, sun_mag_y, sun_mag_z = lib_epop_geopack.magsm( sun_gsm_x, sun_gsm_y, sun_gsm_z, -1 )
sun_mag_lon = numpy.arctan2( sun_mag_y, sun_mag_x ) * 180/numpy.pi
given_mag_lon = numpy.arctan2( data['mag_y'], data['mag_x'] ) * 180/numpy.pi
mlt = given_mag_lon - sun_mag_lon + 180
mlt /= 15. # Degree -> Hour-angle (0-24)
mlt %= 24. # And make sure it's non-negative.
# And then add the new columns to the dataset:
_names = ['mlt_hour' ]
_types = [ float ]
data = recfunctions.append_fields( data, _names, [mlt], _types, usemask=False)
return data
def _add_inertial_frame( data ):
'''
Append the Inertial/Celestial cartesian information to the given dataset.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & geo_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['j2k_x', 'j2k_y', 'j2k_z' ]
units: [ meters, meters, meters ]
dtype: [float64, float64, float64]
'''
# Check that all 'geo_x/y/z' contained within the given data:
if any([frame not in data.dtype.names for frame in ['geo_x','geo_y','geo_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'geo_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing to prevent exceptions on merge.
if any([out_frame in data.dtype.names for out_frame in ['j2k_x', 'j2k_y', 'j2k_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['j2k_x', 'j2k_y', 'j2k_z']]]
# And then fetch the appropriate data:
j2k_x, j2k_y, j2k_z = lib_j2k_geo_rotation.GEO_to_J2K( data['met'], data['geo_x'], data['geo_y'], data['geo_z'])
# And then add the new columns to the dataset:
_names = ['j2k_x', 'j2k_y', 'j2k_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [j2k_x, j2k_y, j2k_z], _types, usemask=False)
return data
def _add_geographic_frame( data ):
'''
Append the Geographic cartesian information to the given dataset from the Celestial/Inertial cartesian dataset.
Arguments:
data: Structured Array of ephemeris data. Only considers 'met' & j2k_[x|y|z] columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['geo_x', 'geo_y', 'geo_z' ]
units: [ meters, meters, meters ]
dtype: [float64, float64, float64]
'''
# Check that all 'j2k_x/y/z' contained within the given data:
if any([frame not in data.dtype.names for frame in ['j2k_x','j2k_y','j2k_z'] ]):
raise ValueError("Given data object does not contain one of the requisite 'geo_x/y/z' columns!")
# And if any of the output columns are in the input frame, remove them for reprocessing to prevent exceptions on merge.
if any([out_frame in data.dtype.names for out_frame in ['geo_x', 'geo_y', 'geo_z'] ]):
data = data[[ column for column in data.dtype.names if column not in ['geo_x', 'geo_y', 'geo_z']]]
# And then fetch the appropriate data:
geo_x, geo_y, geo_z = lib_j2k_geo_rotation.J2K_to_GEO( data['met'], data['j2k_x'], data['j2k_y'], data['j2k_z'])
# And then add the new columns to the dataset:
_names = ['geo_x', 'geo_y', 'geo_z' ]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [geo_x, geo_y, geo_z], _types, usemask=False)
return data
def _add_spherical( data, frame ):
'''
Append the Spherical Coordinate values given the Cartesian coordinates & the desired frame.
Altitude in meters above the WGS84 spheroid.
Arguments:
data: Structured Array of ephemeris data.
Must include the <frame>_x/y/z columns and the geo_x/y/z columns
Return:
data: numpy structured array.
Appends to the given data object.
All structs return float64 datatype.
names: ['<frame>_lat', '<frame>_lon', '<frame>_alt']
units: [ degrees, degrees, meters ]
'''
# Check that all required columns are present
car_x, car_y, car_z = frame+"_x", frame+"_y", frame+"_z"
sph_lat, sph_lon, sph_alt = frame+"_lat", frame+"_lon", frame+"_alt"
# Make sure the data has the cartesian dependencies
if any([in_frame not in data.dtype.names for in_frame in [car_x, car_y, car_z] ]):
raise ValueError("_add_spherical given data object does not contain one of the requisite '{:s}_x/y/z' columns!".format(frame))
# And if the system already has the spherical coordinates, remove them.
if any([out_frame in data.dtype.names for out_frame in [sph_lat, sph_lon, sph_alt] ]):
data = data[[ column for column in data.dtype.names if column not in [sph_lat, sph_lon, sph_alt]]]
# For readability, extract the columns.
x,y,z = data[car_x], data[car_y], data[car_z]
alt_data = (x**2 + y**2 + z**2) ** 0.5 # Geocentric altitude (meters from Earth's center)
lat_data = numpy.arcsin ( z / alt_data ) * 180 / numpy.pi # -90->90 degrees.
lon_data = numpy.arctan2( y, x) * 180 / numpy.pi # -180->180 degrees
# Update the altitude to meters above WGS84
WGS84_earth_radius = common.get_wgs84_earth_radius_meters( data['geo_x'], data['geo_y'], data['geo_z'] )
alt_data = (x**2 + y**2 + z**2)**0.5 - WGS84_earth_radius
_names = [sph_lat, sph_lon, sph_alt]
_types = [ float, float, float ]
data = recfunctions.append_fields( data, _names, [lat_data, lon_data, alt_data], dtypes=_types, usemask=False)
return data
def _decode_frame( frame_str ):
'''
Given a string corresponding to one of the available coordinate frames,
return the 'reduced' acronym.
EG: 'Solar Magnetospheric' returns 'sm'.
'''
# Cast everything to lower case.
frame_str = frame_str.lower()
# If it's already truncated, then just return it.
if frame_str in AVAILABLE_FRAMES:
return frame_str
# And then check against the full string names.
for i in range(len( AVAILABLE_FRAMES_BY_NAME )):
# IF there's a match, return the acronym of that frame.
if frame_str == AVAILABLE_FRAMES_BY_NAME[i].lower():
return AVAILABLE_FRAMES[i]
# And if it's not in either the short or long format, then raise a ValueError to yell at the user.
raise ValueError("Could not understand the following frame request: '{:s}'".format(frame_str))
Functions
def add_frame(data, frame_list: Union[str, list], cartesian: bool = True, spherical: bool = False)
Rotate the given Geographic ephemeris data into the requested frame(s).
Available frames: Acronyms: geod, mag, gsm, gsw, gse, sm, mlt, j2k. Full names: Geodetic, Geomagnetic, Geocentric Solar Magnetospheric, Geocentric Solar Wind, Geocentric Solar Ecliptic, Solar Magnetospheric, Magnetic Local Time, Inertial.
Arguments
data: Structured Array of ephemeris data. Only considers 'met' & geo_[x|y|z] columns
frame_list: a string, or list of strings, corresponding to the frame the data is to be rotated into. Must be bound within either of the 'Available frames' lists above.
cartesian: boolean. Default True. Return the dataset in Cartesian coordinates.
spherical: boolean. Default False. Return the dataset in Spherical coordinates, including the GEO.
Return
data: numpy structured array. Appends to the given data object. All structs return float64 datatype.
Cartesian names: [<frame>_x, <frame>_y, <frame>_z] (Eg: 'gsm_x'); units: [meters, meters, meters]
Spherical names: [<frame>_lat, <frame>_lon, <frame>_alt]; units: [degrees, degrees, meters geocentric]
geod: names: ['geod_lat', 'geod_lon', 'geod_alt']; units: [degrees, degrees, meters geodetic]
mlt: names: ['mlt_hour']; unit: [hours]
Note
Geodetic rotations disregard the 'cartesian/spherical' argument, returning all columns. Translation follows these chains:
geo->j2k
->geod
->mag->mlt
->gsm->gsw
->gse
->sm
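A minimal usage sketch (illustrative placeholder values; assumes positions are geographic Cartesian in meters and 'met' is the mission-elapsed-time column the library expects):

import numpy
from ephemeris_library.frame_translation import add_frame

# Structured array with the required 'met' and geo_x/y/z columns (placeholder LEO-scale values, in meters).
data = numpy.zeros(2, dtype=[('met', 'f8'), ('geo_x', 'f8'), ('geo_y', 'f8'), ('geo_z', 'f8')])
data['met'] = [1.6e9, 1.6e9 + 60.0]
data['geo_x'] = [6871000.0, 6870000.0]
data['geo_y'] = [10000.0, 20000.0]
data['geo_z'] = [5000.0, 15000.0]

# Request GSM and MLT; the intermediate 'mag' frame needed for MLT is computed
# internally and filtered out of the returned columns.
result = add_frame(data, ['gsm', 'mlt'], cartesian=True, spherical=True)
print(result.dtype.names)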
def add_nadir_solar_angles(data)
Adds solar angle information based on the ORF/Nadir frame. Requires J2K ephemeris, as velocity information is needed to derive the orbital plane.
Arguments
data: Structured Array of ephemeris data. Only considers 'met' & j2k_[x|y|z|vx|vy|vz] columns
Return
data: numpy structured array. Appends to the given data object. All structs return float64 datatype.
Names: sun_zenith, sun_azimuth, sun_beta Units: degrees, degrees, degrees
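A minimal usage sketch (single placeholder sample; in practice the J2K position and velocity columns, in meters and meters per second, would come from the mission ephemeris):

import numpy
from ephemeris_library.frame_translation import add_nadir_solar_angles

dtype = [('met', 'f8'),
('j2k_x', 'f8'), ('j2k_y', 'f8'), ('j2k_z', 'f8'),
('j2k_vx', 'f8'), ('j2k_vy', 'f8'), ('j2k_vz', 'f8')]
data = numpy.zeros(1, dtype=dtype)
data['met'] = 1.6e9
data['j2k_x'], data['j2k_y'], data['j2k_z'] = 6871000.0, 0.0, 0.0
data['j2k_vx'], data['j2k_vy'], data['j2k_vz'] = 0.0, 7600.0, 0.0

data = add_nadir_solar_angles(data)
print(data['sun_zenith'], data['sun_azimuth'], data['sun_beta'])  # all in degrees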