3 changes: 1 addition & 2 deletions imap_processing/tests/ultra/unit/test_de.py
@@ -7,7 +7,6 @@
from imap_processing import imap_module_directory
from imap_processing.cdf.utils import load_cdf
from imap_processing.spice.repoint import get_repoint_data
from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_et
from imap_processing.ultra.constants import UltraConstants
from imap_processing.ultra.l1b.de import calculate_events_in_pointing

@@ -138,7 +137,7 @@ def test_calculate_events_in_pointing(use_fake_repoint_data_for_time):
event_times[0] = pointing_start - 1
in_pointing = calculate_events_in_pointing(
repoint_id,
ttj2000ns_to_et(met_to_ttj2000ns(event_times)),
event_times,
)
# The first event should be False (not during a pointing), and the rest True.
assert np.all(not in_pointing[0])
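With this change, `calculate_events_in_pointing` takes event times in MET seconds directly, so the test no longer needs the `met_to_ttj2000ns`/`ttj2000ns_to_et` round trip. A minimal sketch of the containment check the test exercises, assuming a made-up pointing window in MET:

```python
import numpy as np

# Hypothetical pointing window [start, end] in MET seconds.
pointing_start = 511000000.0
pointing_end = pointing_start + 86400.0

# Event times are passed as MET seconds; the first one is nudged
# just before the window, mirroring the test above.
event_times = np.array(
    [pointing_start - 1.0, pointing_start + 10.0, pointing_end - 10.0]
)

# The post-PR check reduces to a plain interval test in MET.
in_pointing = (event_times >= pointing_start) & (event_times <= pointing_end)
assert not in_pointing[0] and in_pointing[1:].all()
```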
4 changes: 4 additions & 0 deletions imap_processing/tests/ultra/unit/test_ultra_l1b.py
@@ -128,6 +128,7 @@ def test_create_de_dataset(mock_data_l1b_de_dict):
@pytest.mark.external_test_data
def test_cdf_de(
de_dataset,
aux_dataset,
use_fake_spin_data_for_time,
ancillary_files,
use_fake_repoint_data_for_time,
@@ -138,6 +139,7 @@ def test_cdf_de(
data_dict = {}
de_dataset.attrs["Repointing"] = "repoint00001"
data_dict[de_dataset.attrs["Logical_source"]] = de_dataset
data_dict[aux_dataset.attrs["Logical_source"]] = aux_dataset
# Create a spin table that cover spin 0-141
use_fake_spin_data_for_time(511000000, 511000000 + 86400 * 5)
use_fake_repoint_data_for_time(np.arange(511000000, 511000000 + 86400 * 5, 86400))
@@ -163,6 +165,7 @@
def test_cdf_de_flags(
mock_get_annotated_particle_velocity,
de_dataset,
aux_dataset,
use_fake_spin_data_for_time,
ancillary_files,
use_fake_repoint_data_for_time,
@@ -171,6 +174,7 @@ def test_cdf_de_flags(
data_dict = {}
de_dataset.attrs["Repointing"] = "repoint00000"
data_dict[de_dataset.attrs["Logical_source"]] = de_dataset
data_dict[aux_dataset.attrs["Logical_source"]] = aux_dataset
# Create a spin table that cover spin 0-141
use_fake_spin_data_for_time(511000000, 511000000 + 86400 * 5)
# Use repoint data that will NOT cover the event times to test flag setting
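Both tests now take the new `aux_dataset` fixture and register it in `data_dict` alongside the DE dataset, keyed by each dataset's `Logical_source` attribute. A sketch of that plumbing, with placeholder `Logical_source` strings rather than the mission's actual values:

```python
import xarray as xr

# Placeholder datasets; only the Logical_source attribute matters here.
de_dataset = xr.Dataset(attrs={"Logical_source": "imap_ultra_l1a_45sensor-de"})
aux_dataset = xr.Dataset(attrs={"Logical_source": "imap_ultra_l1a_45sensor-aux"})

# Each input dataset is registered under its own Logical_source key.
data_dict = {ds.attrs["Logical_source"]: ds for ds in (de_dataset, aux_dataset)}
```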
101 changes: 53 additions & 48 deletions imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py
@@ -6,7 +6,6 @@

from imap_processing import imap_module_directory
from imap_processing.quality_flags import ImapDEOutliersUltraFlags
from imap_processing.spice.spin import get_spin_data
from imap_processing.spice.time import met_to_ttj2000ns, ttj2000ns_to_et
from imap_processing.ultra.constants import UltraConstants
from imap_processing.ultra.l1b.lookup_utils import get_angular_profiles
@@ -25,14 +24,13 @@
get_efficiency,
get_energy_pulse_height,
get_energy_ssd,
get_eventtimes,
get_event_times,
get_front_x_position,
get_front_y_position,
get_fwhm,
get_path_length,
get_ph_tof_and_back_positions,
get_phi_theta,
get_spin_number,
get_ssd_back_position_and_tof_offset,
get_ssd_tof,
interpolate_fwhm,
@@ -519,63 +517,70 @@ def test_get_phi_theta(test_fixture):


@pytest.mark.external_test_data
def test_get_spin_number(test_fixture, use_fake_spin_data_for_time):
"""Tests that get_spin_number assigns the correct spin number."""
df_filt, _, _, de_dataset = test_fixture

# Simulate a spin table from MET = 0 to MET = 500*15 seconds
use_fake_spin_data_for_time(start_met=0, end_met=500 * 15)

de_met = np.array([0, 0, 5760, 5760, 5760, 5760, 5760, 5760])
de_spin = np.array([128, 128, 129, 129, 130, 130, 131, 131], dtype=np.uint8)

spin_number = get_spin_number(de_met, de_spin)

assert np.array_equal(spin_number & 0xFF, de_spin)


@pytest.mark.external_test_data
def test_get_eventtimes(test_fixture, use_fake_spin_data_for_time):
def test_get_eventtimes(test_fixture, aux_dataset):
"""Tests get_eventtimes function."""
df_filt, _, _, de_dataset = test_fixture
# Create a spin table that cover spin 0-141
use_fake_spin_data_for_time(0, 141 * 15)

event_times, spin_starts, spin_period_sec = get_eventtimes(
de_dataset["spin"].values, de_dataset["phase_angle"].values
event_times, spin_start_times, spin_numbers = get_event_times(
aux_dataset,
de_dataset["phase_angle"].values,
de_dataset["shcoarse"].values,
)

spin_df = get_spin_data()
expected_min_df = spin_df[spin_df["spin_number"] == de_dataset["spin"].values.min()]
expected_max_df = spin_df[spin_df["spin_number"] == de_dataset["spin"].values.max()]
spin_period_sec_min = expected_min_df["spin_period_sec"].values[0]
spin_period_sec_max = expected_max_df["spin_period_sec"].values[0]

spin_start_min = (
expected_min_df["spin_start_sec_sclk"]
+ expected_min_df["spin_start_subsec_sclk"] / 1e6
)
spin_start_max = (
expected_max_df["spin_start_sec_sclk"]
+ expected_max_df["spin_start_subsec_sclk"] / 1e6
# Check shapes
assert (
event_times.shape
== spin_start_times.shape
== spin_numbers.shape
== de_dataset["phase_angle"].shape
)

assert (
ttj2000ns_to_et(met_to_ttj2000ns(spin_start_min.values[0])) == spin_starts.min()
t1_start_sec = aux_dataset["timespinstart"].values[0]
t1_start_subsec = aux_dataset["timespinstartsub"].values[0]
t1_start_dur = aux_dataset["duration"].values[0]

expected_first_event_time = (
t1_start_sec
+ (t1_start_subsec + t1_start_dur * de_dataset["phase_angle"].values[0] / 720.0)
/ 1000.0
)
# Check first event
assert (
ttj2000ns_to_et(met_to_ttj2000ns(spin_start_max.values[0])) == spin_starts.max()
ttj2000ns_to_et(met_to_ttj2000ns(expected_first_event_time)) == event_times[0]
)

event_times_min = spin_start_min.values[0] + spin_period_sec_min * (
de_dataset["phase_angle"][0] / 720
)
event_times_max = spin_start_max.values[0] + spin_period_sec_max * (
de_dataset["phase_angle"][-1] / 720
)
# Check that each calculated event times fall within the expected range
# Use the coarse time to determine the expected range
spin_starts = ttj2000ns_to_et(met_to_ttj2000ns(aux_dataset["timespinstart"].values))
for i, coarse_time in enumerate(de_dataset["shcoarse"].values):
boundary = np.searchsorted(spin_starts, coarse_time, side="right") - 1
if boundary < 0 or boundary >= len(spin_starts) - 1:
continue
start_time = spin_starts[boundary]
end_time = spin_starts[boundary + 1]
assert start_time <= int(event_times[i]) <= end_time


assert ttj2000ns_to_et(met_to_ttj2000ns(event_times_min)) == event_times.min()
assert ttj2000ns_to_et(met_to_ttj2000ns(event_times_max)) == event_times.max()
@pytest.mark.external_test_data
def test_get_event_times_out_of_range(test_fixture, aux_dataset):
"""Tests get_event_times with out of range values."""
df_filt, _, _, de_dataset = test_fixture
# Get min time from aux_dataset
min_time = aux_dataset["timespinstart"].values.min()
# Set some coarse times to be out of range (less than min_time)
coarse_times = de_dataset["shcoarse"].values.copy()
# Set first coarse time to be out of range
coarse_times[0] = min_time - 1000

with pytest.raises(
ValueError,
match="Coarse MET time contains events outside aux_dataset time range",
):
get_event_times(
aux_dataset,
de_dataset["phase_angle"].values,
coarse_times,
)


@pytest.mark.external_test_data
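The rewritten test derives expected event times from the auxiliary packet fields rather than the SPICE spin table. A worked example of the formula it checks; the units are inferred from the test's divisors (sub-second start and spin duration in milliseconds, phase angle in 1/720ths of a spin):

```python
# Worked example of the event-time formula asserted above, under the
# unit assumptions stated in the lead-in.
timespinstart = 511000000   # spin start [whole MET seconds]
timespinstartsub = 250.0    # sub-second part of spin start [ms]
duration = 15000.0          # spin period [ms], i.e. a 15 s spin
phase_angle = 360           # halfway through the spin (360/720)

event_time_met = (
    timespinstart
    + (timespinstartsub + duration * phase_angle / 720.0) / 1000.0
)
assert event_time_met == 511000007.75  # 0.25 s offset + 7.5 s into the spin
```

The companion test confirms that coarse times falling before the first auxiliary spin start raise a `ValueError` rather than being silently extrapolated.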
5 changes: 4 additions & 1 deletion imap_processing/ultra/constants.py
@@ -46,7 +46,6 @@ class UltraConstants:

# Composite energy threshold for SSD events
COMPOSITE_ENERGY_THRESHOLD: int = 1707

# Geometry-related constants
Z_DSTOP: float = 2.6 / 2 # Position of stop foil on Z axis [mm]
Z_DS: float = 46.19 - (2.6 / 2) # Position of slit on Z axis [mm]
@@ -157,3 +156,7 @@ class UltraConstants:
"proton": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
"non_proton": [20, 21, 22, 23, 24, 25, 26],
}

# For FOV calculations
FOV_THETA_OFFSET_DEG = 0.0
FOV_PHI_LIMIT_DEG = 60.0
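The diff adds only the FOV constants, not the check that consumes them, so the following is a purely hypothetical sketch of how they might be used downstream:

```python
import numpy as np

from imap_processing.ultra.constants import UltraConstants


def in_fov(theta_deg: np.ndarray, phi_deg: np.ndarray) -> np.ndarray:
    """Illustrative only: flag look directions inside a nominal FOV."""
    theta_centered = theta_deg - UltraConstants.FOV_THETA_OFFSET_DEG
    # The 90 deg theta bound is a placeholder, not from this PR.
    return (np.abs(theta_centered) <= 90.0) & (
        np.abs(phi_deg) <= UltraConstants.FOV_PHI_LIMIT_DEG
    )
```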
83 changes: 36 additions & 47 deletions imap_processing/ultra/l1b/de.py
@@ -10,7 +10,9 @@
)
from imap_processing.spice.geometry import SpiceFrame
from imap_processing.spice.repoint import get_pointing_times_from_id
from imap_processing.spice.time import et_to_met
from imap_processing.spice.time import (
et_to_met,
)
from imap_processing.ultra.l1b.lookup_utils import get_geometric_factor
from imap_processing.ultra.l1b.ultra_l1b_annotated import (
get_annotated_particle_velocity,
@@ -28,14 +30,13 @@
get_efficiency,
get_energy_pulse_height,
get_energy_ssd,
get_eventtimes,
get_event_times,
get_front_x_position,
get_front_y_position,
get_fwhm,
get_path_length,
get_ph_tof_and_back_positions,
get_phi_theta,
get_spin_number,
get_ssd_back_position_and_tof_offset,
get_ssd_tof,
is_back_tof_valid,
@@ -49,7 +50,7 @@


def calculate_de(
de_dataset: xr.Dataset, name: str, ancillary_files: dict
de_dataset: xr.Dataset, aux_dataset: xr.Dataset, name: str, ancillary_files: dict
) -> xr.Dataset:
"""
Create dataset with defined datatypes for Direct Event Data.
@@ -58,6 +59,8 @@ def calculate_de(
----------
de_dataset : xarray.Dataset
L1a dataset containing direct event data.
aux_dataset : xarray.Dataset
L1a dataset containing auxiliary data.
name : str
Name of the l1a dataset.
ancillary_files : dict
@@ -71,17 +74,13 @@
de_dict = {}
sensor = parse_filename_like(name)["sensor"][0:2]

# Define epoch and spin.
# Define epoch
de_dict["epoch"] = de_dataset["epoch"].data
spin_number = get_spin_number(
de_dataset["shcoarse"].values, de_dataset["spin"].values
)

repoint_id = de_dataset.attrs.get("Repointing", None)
if repoint_id is not None:
repoint_id = int(repoint_id.replace("repoint", ""))

de_dict["spin"] = spin_number

# Add already populated fields.
keys = [
"coincidence_type",
@@ -104,7 +103,6 @@
for key, dataset_key in zip(keys, dataset_keys, strict=False)
}
)

valid_mask = de_dataset["start_type"].data != FILLVAL_UINT8
ph_mask = np.isin(
de_dataset["stop_type"].data, [StopType.Top.value, StopType.Bottom.value]
@@ -114,7 +112,6 @@
valid_indices = np.nonzero(valid_mask)[0]
ph_indices = np.nonzero(valid_mask & ph_mask)[0]
ssd_indices = np.nonzero(valid_mask & ssd_mask)[0]

# Instantiate arrays
xf = np.full(len(de_dataset["epoch"]), FILLVAL_FLOAT32, dtype=np.float32)
yf = np.full(len(de_dataset["epoch"]), FILLVAL_FLOAT32, dtype=np.float32)
@@ -135,12 +132,10 @@
e_bin_l1a = np.full(len(de_dataset["epoch"]), FILLVAL_UINT8, dtype=np.uint8)
species_bin = np.full(len(de_dataset["epoch"]), FILLVAL_UINT8, dtype=np.uint8)
t2 = np.full(len(de_dataset["epoch"]), FILLVAL_FLOAT32, dtype=np.float32)
event_times = np.full(len(de_dataset["epoch"]), FILLVAL_FLOAT32, dtype=np.float32)
shape = (len(de_dataset["epoch"]), 3)
sc_velocity = np.full(shape, FILLVAL_FLOAT32, dtype=np.float32)
sc_dps_velocity = np.full(shape, FILLVAL_FLOAT32, dtype=np.float32)
helio_velocity = np.full(shape, FILLVAL_FLOAT32, dtype=np.float32)
spin_starts = np.full(len(de_dataset["epoch"]), FILLVAL_FLOAT32, dtype=np.float64)
velocities = np.full(shape, FILLVAL_FLOAT32, dtype=np.float32)
v_hat = np.full(shape, FILLVAL_FLOAT32, dtype=np.float32)
r_hat = np.full(shape, FILLVAL_FLOAT32, dtype=np.float32)
@@ -163,16 +158,13 @@
ancillary_files,
)
start_type[valid_indices] = de_dataset["start_type"].data[valid_indices]

(
event_times[valid_indices],
spin_starts[valid_indices],
_,
) = get_eventtimes(
de_dict["spin"][valid_indices],
de_dataset["phase_angle"].data[valid_indices],
(event_times, spin_starts, spin_number) = get_event_times(
aux_dataset,
de_dataset["phase_angle"].data,
de_dataset["shcoarse"].data,
)

de_dict["spin"] = spin_number
de_dict["event_times"] = event_times.astype(np.float64)
# Pulse height
ph_result = get_ph_tof_and_back_positions(
de_dataset, xf, f"ultra{sensor}", ancillary_files
@@ -325,31 +317,27 @@ def calculate_de(
# Annotated Events.
ultra_frame = getattr(SpiceFrame, f"IMAP_ULTRA_{sensor}")

# Account for counts=0 (event times have FILL value)
valid_events = (event_times != FILLVAL_FLOAT32).copy()
valid_events = np.ones(event_times.shape, bool)

if repoint_id is not None:
in_pointing = calculate_events_in_pointing(
repoint_id, event_times[valid_events]
)
events_to_flag = np.zeros(len(quality_flags), dtype=bool)
events_to_flag[valid_events] = ~in_pointing
in_pointing = calculate_events_in_pointing(repoint_id, et_to_met(event_times))
events_to_flag = ~in_pointing
# Update quality flags for valid events that are not in the pointing
quality_flags[events_to_flag] |= ImapDEOutliersUltraFlags.DURINGREPOINT.value
# Update valid_events to only include times within a pointing
valid_events[valid_events] &= in_pointing

if np.any(valid_events):
(
sc_velocity[valid_events],
sc_dps_velocity[valid_events],
helio_velocity[valid_events],
) = get_annotated_particle_velocity(
event_times[valid_events],
velocities.astype(np.float32)[valid_events],
ultra_frame,
SpiceFrame.IMAP_DPS,
SpiceFrame.IMAP_SPACECRAFT,
)
valid_events &= in_pointing

(
sc_velocity[valid_events],
sc_dps_velocity[valid_events],
helio_velocity[valid_events],
) = get_annotated_particle_velocity(
event_times[valid_events],
velocities.astype(np.float32)[valid_events],
ultra_frame,
SpiceFrame.IMAP_DPS,
SpiceFrame.IMAP_SPACECRAFT,
)

de_dict["velocity_sc"] = sc_velocity
de_dict["velocity_dps_sc"] = sc_dps_velocity
@@ -416,7 +404,7 @@ def calculate_events_in_pointing(
repoint_id : int
The repointing ID.
event_times : np.ndarray
Array of event times in ET.
Array of event times in MET.

Returns
-------
@@ -425,9 +413,10 @@
combined with the valid_events mask.
"""
pointing_start_met, pointing_end_met = get_pointing_times_from_id(repoint_id)

# Check which events are within the pointing
in_pointing = (et_to_met(event_times) >= pointing_start_met) & (
et_to_met(event_times) <= pointing_end_met
in_pointing = (event_times >= pointing_start_met) & (
event_times <= pointing_end_met
)

return in_pointing
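Assembled from the hunks above for readability, the post-merge `calculate_events_in_pointing` reduces to a pure MET interval test (docstring abbreviated here):

```python
import numpy as np

from imap_processing.spice.repoint import get_pointing_times_from_id


def calculate_events_in_pointing(
    repoint_id: int, event_times: np.ndarray
) -> np.ndarray:
    """Return a boolean mask of events inside the pointing (MET in, MET out)."""
    pointing_start_met, pointing_end_met = get_pointing_times_from_id(repoint_id)
    # Check which events are within the pointing
    in_pointing = (event_times >= pointing_start_met) & (
        event_times <= pointing_end_met
    )
    return in_pointing
```

Since the caller in `calculate_de` already holds MET via `et_to_met(event_times)`, the function no longer converts internally, which is what the updated docstring ("Array of event times in MET") records.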