Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
197 changes: 182 additions & 15 deletions imap_processing/ialirt/l0/process_codice.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Functions to support I-ALiRT CoDICE processing."""

import logging
import pathlib
from collections import namedtuple
from decimal import Decimal
from pathlib import Path
from typing import Any
Expand All @@ -15,6 +15,12 @@
from imap_processing.codice.codice_l1a_ialirt_hi import l1a_ialirt_hi
from imap_processing.codice.codice_l1a_lo_species import l1a_lo_species
from imap_processing.codice.codice_l1b import convert_to_rates
from imap_processing.codice.codice_l2 import (
compute_geometric_factors,
get_efficiency_lut,
get_geometric_factor_lut,
process_lo_species_intensity,
)
from imap_processing.ialirt.utils.grouping import find_groups
from imap_processing.ialirt.utils.time import calculate_time
from imap_processing.spice.time import met_to_ttj2000ns, met_to_utc
Expand All @@ -28,6 +34,18 @@
COD_LO_RANGE = range(0, 15)
COD_HI_RANGE = range(0, 5)

# Container for the six CoDICE-Lo L2 ratio products produced by
# ``calculate_ratios``.
COD_LO_L2 = namedtuple(
    "COD_LO_L2",
    "c_over_o_abundance mg_over_o_abundance fe_over_o_abundance "
    "c_plus_6_over_c_plus_5 o_plus_7_over_o_plus_6 fe_low_over_fe_high",
)


def process_ialirt_data_streams(
grouped_data: list[bytearray],
Expand Down Expand Up @@ -141,7 +159,6 @@ def create_xarray_dataset(
science_values: list,
metadata_values: dict,
sensor: str,
lut_file: pathlib.Path,
) -> xr.Dataset:
"""
Create a xarray Dataset from science and metadata values.
Expand All @@ -154,8 +171,6 @@ def create_xarray_dataset(
Dictionary of metadata values.
sensor : str
The sensor type, either 'lo' or 'hi'.
lut_file : pathlib.Path
Path to the LUT file.

Returns
-------
Expand Down Expand Up @@ -190,7 +205,7 @@ def create_xarray_dataset(


def convert_to_intensities(
cod_hi_l1b_data: xr.Dataset, l2_lut_path: pathlib.Path, species: str
cod_hi_l1b_data: xr.Dataset, l2_lut_path: Path, species: str
) -> NDArray:
"""
Calculate intensities.
Expand All @@ -199,7 +214,7 @@ def convert_to_intensities(
----------
cod_hi_l1b_data : xr.Dataset
L1b data.
l2_lut_path : pathlib.Path
l2_lut_path : Path
L2 LUT path.
species : str
CoDICE Hi species.
Expand Down Expand Up @@ -242,10 +257,141 @@ def convert_to_intensities(
return intensity


def calculate_ratios(
    cod_lo_l1b_data: xr.Dataset,
    l2_lut_path: Path,
    l2_geometric_factor_path: Path | None,
) -> COD_LO_L2:
    """
    Calculate CoDICE-Lo L2 ratio data products.

    Parameters
    ----------
    cod_lo_l1b_data : xarray.Dataset
        CoDICE-Lo L1B data in xarray format.
    l2_lut_path : Path
        Path to the efficiency lookup table.
    l2_geometric_factor_path : Path or None
        Path to the geometric factor lookup table.

    Returns
    -------
    COD_LO_L2
        Named tuple with the six ratio products: c_over_o_abundance,
        mg_over_o_abundance, fe_over_o_abundance, c_plus_6_over_c_plus_5,
        o_plus_7_over_o_plus_6 and fe_low_over_fe_high. Each field is a
        ``Decimal`` rounded to three places, or ``FILLVAL_FLOAT32`` when
        its denominator is zero.
    """
    geometric_factor_lookup = get_geometric_factor_lut(None, l2_geometric_factor_path)
    geometric_factors = compute_geometric_factors(
        cod_lo_l1b_data, geometric_factor_lookup
    )

    # Only the solar-wind product rows of the efficiency LUT apply here.
    efficiency_lookup = get_efficiency_lut(None, l2_lut_path)
    efficiencies = efficiency_lookup[efficiency_lookup["product"] == "sw"]
    intensity = process_lo_species_intensity(
        cod_lo_l1b_data,
        constants.LO_IALIRT_VARIABLE_NAMES,
        geometric_factors,
        efficiencies,
        constants.SOLAR_WIND_POSITIONS,
    )

    # Pseudo density per species: intensity weighted by sqrt(energy) and
    # sqrt(m/q), summed over ESA steps (spin_sector is a singleton dim).
    pseudo_density_dict = {}
    for species in constants.LO_IALIRT_VARIABLE_NAMES:
        pseudo_density = (
            intensity[species]
            * np.sqrt(cod_lo_l1b_data["energy_table"])
            * np.sqrt(constants.LO_IALIRT_M_OVER_Q[species])
        )  # (epoch, esa_step, spin_sector)

        summed_pseudo_density = pseudo_density.sum(dim="esa_step").squeeze(
            "spin_sector"
        )  # (epoch,)
        pseudo_density_dict[species] = summed_pseudo_density.values

    species_list = constants.LO_IALIRT_VARIABLE_NAMES

    def _ratio(numerator: Any, denominator: Any) -> Any:
        """Return numerator/denominator as a 3-place Decimal, or fill on 0.

        Note that outside of this test a zero value denominator will lead
        to a null value. The use of zeros here is only to match the test
        data as confirmed by the instrument team.
        """
        if float(denominator) == 0:
            return FILLVAL_FLOAT32
        return Decimal(f"{numerator / denominator:.3f}")

    # Shared denominator for the elemental abundance ratios (total O).
    o_abundance_denom = (
        pseudo_density_dict[species_list[3]]
        + pseudo_density_dict[species_list[4]]
        + pseudo_density_dict[species_list[5]]
    )

    return COD_LO_L2(
        c_over_o_abundance=_ratio(
            pseudo_density_dict[species_list[1]]
            + pseudo_density_dict[species_list[2]],
            o_abundance_denom,
        ),
        mg_over_o_abundance=_ratio(
            pseudo_density_dict[species_list[6]], o_abundance_denom
        ),
        fe_over_o_abundance=_ratio(
            pseudo_density_dict[species_list[7]]
            + pseudo_density_dict[species_list[8]],
            o_abundance_denom,
        ),
        c_plus_6_over_c_plus_5=_ratio(
            pseudo_density_dict[species_list[2]],
            pseudo_density_dict[species_list[1]],
        ),
        o_plus_7_over_o_plus_6=_ratio(
            pseudo_density_dict[species_list[4]],
            pseudo_density_dict[species_list[3]],
        ),
        fe_low_over_fe_high=_ratio(
            pseudo_density_dict[species_list[7]],
            pseudo_density_dict[species_list[8]],
        ),
    )


def process_codice(
dataset: xr.Dataset,
l1a_lut_path: pathlib.Path,
l2_lut_path: pathlib.Path,
l1a_lut_path: Path,
l2_lut_path: Path,
sensor: str,
l2_geometric_factor_path: Path | None = None,
) -> tuple:
Expand All @@ -256,13 +402,13 @@ def process_codice(
----------
dataset : xr.Dataset
Decommed L0 data.
l1a_lut_path : pathlib.Path
l1a_lut_path : Path
L1A LUT path.
l2_lut_path : pathlib.Path
l2_lut_path : Path
L2 LUT path.
sensor : str
Sensor (codice_hi or codice_lo).
l2_geometric_factor_path : pathlib.Path
l2_geometric_factor_path : Path
Optional geometric factor path based on the sensor (required by Lo).

Returns
Expand Down Expand Up @@ -318,9 +464,30 @@ def process_codice(
[cod_lo_data_stream]
)
cod_lo_dataset = create_xarray_dataset(
cod_lo_science_values, cod_lo_metadata_values, "lo", l1a_lut_path
cod_lo_science_values, cod_lo_metadata_values, "lo"
)
l1a_lo = l1a_lo_species(cod_lo_dataset, l1a_lut_path)
l1b_lo = convert_to_rates(
l1a_lo,
"lo-ialirt",
)
l2_lo = calculate_ratios(l1b_lo, l2_lut_path, l2_geometric_factor_path)

codice_lo_data.append(
{
"apid": 478,
"met": int(met[0]),
"met_in_utc": met_to_utc(met[0]).split(".")[0],
"ttj2000ns": int(met_to_ttj2000ns(met[0])),
"instrument": f"{sensor}",
f"{sensor}_c_over_o_abundance": l2_lo.c_over_o_abundance,
f"{sensor}_mg_over_o_abundance": l2_lo.mg_over_o_abundance,
f"{sensor}_fe_over_o_abundance": l2_lo.fe_over_o_abundance,
f"{sensor}_c_plus_6_over_c_plus_5": l2_lo.c_plus_6_over_c_plus_5,
f"{sensor}_o_plus_7_over_o_plus_6": l2_lo.o_plus_7_over_o_plus_6,
f"{sensor}_fe_low_over_fe_high": l2_lo.fe_low_over_fe_high,
}
)
result = l1a_lo_species(cod_lo_dataset, l1a_lut_path) # noqa

if sensor == "codice_hi" and unique_cod_hi_groups.size > 0:
for group in unique_cod_hi_groups:
Expand All @@ -335,7 +502,7 @@ def process_codice(
[cod_hi_data_stream]
)
cod_hi_dataset = create_xarray_dataset(
cod_hi_science_values, cod_hi_metadata_values, "hi", l1a_lut_path
cod_hi_science_values, cod_hi_metadata_values, "hi"
)
l1a_hi = l1a_ialirt_hi(cod_hi_dataset, l1a_lut_path)
l1b_hi = convert_to_rates(
Expand All @@ -356,7 +523,7 @@ def process_codice(
"ttj2000ns": int(met_to_ttj2000ns(met[0])),
"instrument": f"{sensor}",
f"{sensor}_epoch": [int(epoch) for epoch in l1b_hi["epoch"]],
f"{sensor}_l2_hi": dec_l2_hi,
f"{sensor}_h": dec_l2_hi,
}
)

Expand Down
47 changes: 42 additions & 5 deletions imap_processing/tests/ialirt/unit/test_process_codice.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
from imap_processing.ialirt.l0.process_codice import (
COD_HI_COUNTER,
COD_LO_COUNTER,
FILLVAL_FLOAT32,
FILLVAL_UINT8,
concatenate_bytes,
convert_to_intensities,
Expand Down Expand Up @@ -405,7 +406,7 @@ def test_create_xarray_dataset_basic(l1a_lut_path):
"SPIN_PERIOD": np.array([24]),
}

ds = create_xarray_dataset(science_values, metadata_values, "lo", l1a_lut_path)
ds = create_xarray_dataset(science_values, metadata_values, "lo")

for key in metadata_values:
assert key.lower() in ds.variables
Expand Down Expand Up @@ -484,7 +485,7 @@ def test_group_and_decompress_ialirt_cod_lo(

np.testing.assert_array_equal(decompressed_values, test_decom_data_array)

dataset = create_xarray_dataset(science_values, metadata_values, "lo", l1a_lut_path)
dataset = create_xarray_dataset(science_values, metadata_values, "lo")
result = l1a_lo_species(dataset, l1a_lut_path)

expected_species = [
Expand Down Expand Up @@ -569,7 +570,7 @@ def test_group_and_decompress_ialirt_cod_hi(

np.testing.assert_array_equal(decompressed_values, test_decom_data[i])

dataset = create_xarray_dataset(science_values, metadata_values, "hi", l1a_lut_path)
dataset = create_xarray_dataset(science_values, metadata_values, "hi")
result = l1a_ialirt_hi(dataset, l1a_lut_path)

expected_species = [
Expand Down Expand Up @@ -605,7 +606,7 @@ def test_l2_ialirt_cod_hi(cod_hi_l1b_test_data, l2_lut_path, cod_hi_l2_test_data


@pytest.mark.external_test_data
def test_process_codice_lo(
def test_l2_ialirt_cod_lo(
cod_lo_l1b_test_data, l1a_lut_path, cod_lo_l2_test_data, l2_processing_dependencies
):
"""Test process_codice for hi."""
Expand Down Expand Up @@ -732,6 +733,42 @@ def test_process_codice_lo(
)


@pytest.mark.external_test_data
def test_process_codice_lo(
    cod_lo_test_dataset,
    l1a_lut_path,
    l2_lut_path,
    cod_lo_l2_test_data,
    l2_processing_dependencies,
):
    """Test process_codice for lo."""
    eff_path, gf_path = l2_processing_dependencies

    # The decommed dataset fixture lacks the spacecraft clock fields that
    # process_codice reads; zero-filled values are sufficient here.
    n = cod_lo_test_dataset.sizes["epoch"]
    cod_lo_test_dataset = cod_lo_test_dataset.assign(
        sc_sclk_sec=("epoch", np.zeros(n, dtype=np.int64)),
        sc_sclk_sub_sec=("epoch", np.zeros(n, dtype=np.int64)),
    )

    cod_lo_data, _ = process_codice(
        cod_lo_test_dataset, l1a_lut_path, eff_path, "codice_lo", gf_path
    )

    l2_products = [
        "codice_lo_c_over_o_abundance",
        "codice_lo_mg_over_o_abundance",
        "codice_lo_fe_over_o_abundance",
        "codice_lo_c_plus_6_over_c_plus_5",
        "codice_lo_o_plus_7_over_o_plus_6",
        "codice_lo_fe_low_over_fe_high",
    ]

    # One output record per complete CoDICE-Lo group in the fixture.
    assert len(cod_lo_data) == 9

    # The first group's denominators are zero, so every ratio is the fill.
    for product in l2_products:
        assert cod_lo_data[0][product] == FILLVAL_FLOAT32


@pytest.mark.external_test_data
def test_process_codice_hi(
cod_hi_test_dataset, l1a_lut_path, l2_lut_path, cod_hi_l2_test_data
Expand All @@ -756,7 +793,7 @@ def test_process_codice_hi(
)

for i, group in enumerate(cod_hi_data):
arr = np.array(group["codice_hi_l2_hi"], dtype=float)
arr = np.array(group["codice_hi_h"], dtype=float)

np.testing.assert_allclose(
arr,
Expand Down