Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion imap_processing/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -832,14 +832,24 @@ def do_processing( # noqa: PLR0912
f"got {len(cal_prod_paths)}"
)

l1a_diagfee_paths = dependencies.get_file_paths(
source="hi", data_type="l1a", descriptor="diagfee"
)
if len(l1a_diagfee_paths) != 1:
raise ValueError(
f"Expected one L1A DIAG_FEE file, got {len(l1a_diagfee_paths)}"
)

# Load CDFs before passing to hi_goodtimes
l1b_de_datasets = [load_cdf(path) for path in l1b_de_paths]
l1b_hk = load_cdf(l1b_hk_paths[0])
l1a_diagfee = load_cdf(l1a_diagfee_paths[0])

datasets = hi_goodtimes.hi_goodtimes(
l1b_de_datasets,
self.repointing,
l1b_de_datasets,
l1b_hk,
l1a_diagfee,
cal_prod_paths[0],
)
else:
Expand Down
125 changes: 112 additions & 13 deletions imap_processing/hi/hi_goodtimes.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,10 @@ class CullCode(IntEnum):


def hi_goodtimes(
l1b_de_datasets: list[xr.Dataset],
current_repointing: str,
l1b_de_datasets: list[xr.Dataset],
l1b_hk: xr.Dataset,
l1a_diagfee: xr.Dataset,
cal_product_config_path: Path,
) -> list[xr.Dataset]:
"""
Expand All @@ -58,23 +59,26 @@ def hi_goodtimes(

1. mark_incomplete_spin_sets - Remove incomplete 8-spin histogram periods
2. mark_drf_times - Remove times during spacecraft drift restabilization
3. mark_overflow_packets - Remove times when DE packets overflow
4. mark_statistical_filter_0 - Detect drastic penetrating background changes
5. mark_statistical_filter_1 - Detect isotropic count rate increases
6. mark_statistical_filter_2 - Detect short-lived event pulses
3. mark_bad_tdc_cal - Remove times with failed TDC calibration
4. mark_overflow_packets - Remove times when DE packets overflow
5. mark_statistical_filter_0 - Detect drastic penetrating background changes
6. mark_statistical_filter_1 - Detect isotropic count rate increases
7. mark_statistical_filter_2 - Detect short-lived event pulses

Parameters
----------
current_repointing : str
Repointing identifier for the current pointing (e.g., "repoint00001").
Used to identify which dataset in l1b_de_datasets is the current one.
l1b_de_datasets : list[xr.Dataset]
L1B DE datasets for surrounding pointings. Typically includes
current plus 3 preceding and 3 following pointings (7 total).
Statistical filters 0 and 1 use all datasets; other filters use
only the current pointing.
current_repointing : str
Repointing identifier for the current pointing (e.g., "repoint00001").
Used to identify which dataset in l1b_de_datasets is the current one.
l1b_hk : xr.Dataset
L1B housekeeping dataset containing DRF status.
l1a_diagfee : xr.Dataset
L1A DIAG_FEE dataset containing TDC calibration status.
cal_product_config_path : Path
Path to calibration product configuration CSV file.

Expand Down Expand Up @@ -131,6 +135,7 @@ def hi_goodtimes(
l1b_de_datasets,
current_index,
l1b_hk,
l1a_diagfee,
cal_product_config_path,
)
else:
Expand Down Expand Up @@ -200,12 +205,13 @@ def _apply_goodtimes_filters(
l1b_de_datasets: list[xr.Dataset],
current_index: int,
l1b_hk: xr.Dataset,
l1a_diagfee: xr.Dataset,
cal_product_config_path: Path,
Comment thread
tmplummer marked this conversation as resolved.
) -> None:
"""
Apply all goodtimes culling filters to the dataset.

Modifies goodtimes_ds in place by applying filters 1-6.
Modifies goodtimes_ds in place by applying filters 1-7.

Parameters
----------
Expand All @@ -217,6 +223,8 @@ def _apply_goodtimes_filters(
Index of the current pointing in l1b_de_datasets.
l1b_hk : xr.Dataset
L1B housekeeping dataset.
l1a_diagfee : xr.Dataset
L1A DIAG_FEE dataset containing TDC calibration status.
cal_product_config_path : Path
Path to calibration product configuration CSV file.
"""
Expand Down Expand Up @@ -263,23 +271,27 @@ def _apply_goodtimes_filters(
logger.info("Applying filter: mark_drf_times")
mark_drf_times(goodtimes_ds, l1b_hk)

# 3. Mark overflow packets
# 3. Mark bad TDC calibration times
logger.info("Applying filter: mark_bad_tdc_cal")
mark_bad_tdc_cal(goodtimes_ds, l1a_diagfee)

# 4. Mark overflow packets
logger.info("Applying filter: mark_overflow_packets")
mark_overflow_packets(goodtimes_ds, current_l1b_de, cal_product_config)

# 4. Statistical Filter 0 - drastic background changes
# 5. Statistical Filter 0 - drastic background changes
logger.info("Applying filter: mark_statistical_filter_0")
mark_statistical_filter_0(goodtimes_ds, l1b_de_datasets, current_index)

# 5. Statistical Filter 1 - isotropic count rate increases
# 6. Statistical Filter 1 - isotropic count rate increases
logger.info("Applying filter: mark_statistical_filter_1")
mark_statistical_filter_1(
goodtimes_ds,
l1b_de_datasets,
current_index,
)

# 6. Statistical Filter 2 - short-lived event pulses
# 7. Statistical Filter 2 - short-lived event pulses
logger.info("Applying filter: mark_statistical_filter_2")
mark_statistical_filter_2(
goodtimes_ds,
Expand Down Expand Up @@ -1130,6 +1142,93 @@ def mark_overflow_packets(
)


def mark_bad_tdc_cal(
    goodtimes_ds: xr.Dataset,
    diagfee: xr.Dataset,
    cull_code: int = CullCode.LOOSE,
) -> None:
    """
    Remove times with failed TDC calibration (DIAG_FEE method).

    Based on C reference: drop_bad_tdc_diagfee in culling_v2.c provided by
    IMAP-Hi team.

    Walks the DIAG_FEE packets in chronological order; whenever a packet
    reports a failed calibration on any of the three TDCs, every goodtimes
    MET from that packet up to (but not including) the next DIAG_FEE packet
    is flagged as bad.

    Parameters
    ----------
    goodtimes_ds : xr.Dataset
        Goodtimes dataset to update with cull flags.
    diagfee : xr.Dataset
        DIAG_FEE dataset containing TDC calibration status fields:
        - shcoarse: Mission Elapsed Time (MET)
        - tdc1_cal_ctrl_stat: TDC1 calibration status (bit 1 = success)
        - tdc2_cal_ctrl_stat: TDC2 calibration status (bit 1 = success)
        - tdc3_cal_ctrl_stat: TDC3 calibration status (bit 1 = success)
    cull_code : int, optional
        Cull code to use for marking bad times. Default is CullCode.LOOSE.

    Notes
    -----
    This function modifies goodtimes_ds in place.

    Quirk: Two DIAG_FEE packets are generated when entering HVSCI mode.
    The first packet is skipped if two packets appear within 10 seconds.
    """
    logger.info("Running mark_bad_tdc_cal culling")

    # Mirror culling_v2.c: the check is skipped outright when fewer than
    # two DIAG_FEE packets exist.
    n_packets = len(diagfee.epoch)
    if n_packets < 2:
        logger.warning(
            f"Insufficient DIAG_FEE packets to select good times "
            f"(found {len(diagfee.epoch)}, need at least 2)"
        )
        return

    packet_met = diagfee["shcoarse"].values
    all_met = goodtimes_ds.coords["met"].values

    # A packet is the "first of a duplicate pair" when the next packet
    # arrives within 10 seconds; such packets are ignored below.
    first_of_pair = np.concatenate([np.diff(packet_met) < 10, [False]])

    # Bit 1 of each cal status word is the success flag (1=good, 0=bad);
    # a packet is bad when any of the three TDCs reports failure.
    cal_failed = np.zeros(n_packets, dtype=bool)
    for status_field in (
        "tdc1_cal_ctrl_stat",
        "tdc2_cal_ctrl_stat",
        "tdc3_cal_ctrl_stat",
    ):
        cal_failed |= (diagfee[status_field].values & 2) == 0

    n_times_removed = 0
    for idx in np.nonzero(cal_failed & ~first_of_pair)[0]:
        # Cull everything from this packet up to the next DIAG_FEE packet
        # (open-ended for the final packet). Because the first packet of a
        # duplicate pair is skipped, this window already spans the gap
        # between non-duplicate packets; the ~10 seconds of slop around
        # duplicates can be ignored since those packets are only produced
        # while IMAP-Hi transitions to HVSCI mode, when no DE packets are
        # being generated.
        window_start = packet_met[idx]
        window_end = packet_met[idx + 1] if idx + 1 < n_packets else np.inf

        in_window = (all_met >= window_start) & (all_met < window_end)
        bad_mets = all_met[in_window]

        if len(bad_mets) > 0:
            goodtimes_ds.goodtimes.mark_bad_times(met=bad_mets, cull=cull_code)
            n_times_removed += len(bad_mets)

    logger.info(f"Dropped {n_times_removed} time(s) due to bad TDC calibration")


def _get_sweep_indices(esa_step: np.ndarray) -> np.ndarray:
"""
Assign sweep indices to each MET based on ESA step transitions.
Expand Down
Loading
Loading