Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 13 additions & 4 deletions imap_processing/glows/l2/glows_l2.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
PipelineSettings,
)
from imap_processing.glows.l2.glows_l2_data import HistogramL2
from imap_processing.glows.utils.constants import GlowsConstants
from imap_processing.spice.time import (
et_to_datetime64,
met_to_utc,
Expand Down Expand Up @@ -96,7 +97,7 @@ def create_l2_dataset(
)

bins = xr.DataArray(
np.arange(histogram_l2.daily_lightcurve.number_of_bins, dtype=np.uint32),
np.arange(GlowsConstants.STANDARD_BIN_COUNT, dtype=np.uint32),
name="bins",
dims=["bins"],
attrs=attrs.get_variable_attributes("bins_dim", check_schema=False),
Expand Down Expand Up @@ -187,19 +188,27 @@ def create_l2_dataset(
attrs=attrs.get_variable_attributes(key),
)

n_bins = histogram_l2.daily_lightcurve.number_of_bins
for key, value in dataclasses.asdict(histogram_l2.daily_lightcurve).items():
if key == "number_of_bins":
# number_of_bins does not have n_bins dimensions.
# number_of_bins does not have a bins dimension.
output[key] = xr.DataArray(
np.array([value]),
dims=["epoch"],
attrs=attrs.get_variable_attributes(key),
)
else:
# Bin arrays are chopped to number_of_bins in DailyLightcurve to
# avoid operating on FILLVAL data. Re-expand to STANDARD_BIN_COUNT
# here, filling unused bins with the variable's CDF FILLVAL.
var_attrs = attrs.get_variable_attributes(key)
fillval = var_attrs["FILLVAL"]
padded = np.full(GlowsConstants.STANDARD_BIN_COUNT, fillval)
padded[:n_bins] = value
output[key] = xr.DataArray(
np.array([value]),
np.array([padded]),
dims=["epoch", "bins"],
attrs=attrs.get_variable_attributes(key),
attrs=var_attrs,
)

return output
108 changes: 85 additions & 23 deletions imap_processing/glows/l2/glows_l2_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@

from imap_processing.glows import FLAG_LENGTH
from imap_processing.glows.l1b.glows_l1b_data import PipelineSettings
from imap_processing.glows.utils.constants import GlowsConstants
from imap_processing.spice.geometry import SpiceFrame, get_instrument_mounting_az_el


@dataclass
Expand Down Expand Up @@ -47,15 +49,15 @@ class DailyLightcurve:
raw_histograms: np.ndarray = field(init=False)
exposure_times: np.ndarray = field(init=False)
flux_uncertainties: np.ndarray = field(init=False)
# TODO: flag array
histogram_flag_array: np.ndarray = field(init=False)
# TODO: ecliptic coordinates
ecliptic_lon: np.ndarray = field(init=False)
ecliptic_lat: np.ndarray = field(init=False)
number_of_bins: int = field(init=False)
l1b_data: InitVar[xr.Dataset]
position_angle: InitVar[float]

def __post_init__(self, l1b_data: xr.Dataset) -> None:
def __post_init__(self, l1b_data: xr.Dataset, position_angle: float) -> None:
"""
Compute all the daily lightcurve variables from L1B data.

Expand All @@ -64,10 +66,27 @@ def __post_init__(self, l1b_data: xr.Dataset) -> None:
l1b_data : xarray.Dataset
L1B data filtered by good times, good angles, and good bins for one
observation day.
position_angle : float
The offset angle of the GLOWS instrument from the north spin point - this
is used in spin angle calculations.
"""
self.raw_histograms = self.calculate_histogram_sums(l1b_data["histogram"].data)
# number_of_bins_per_histogram is the count of valid (non-FILLVAL) bins.
# Histogram arrays from L1B are always GlowsConstants.STANDARD_BIN_COUNT
# (3600) long, with unused bins filled with GlowsConstants.HISTOGRAM_FILLVAL.
# All bin-dimensioned arrays here are chopped to number_of_bins so that
# computations only operate on valid data. glows_l2.py is responsible for
# re-expanding these arrays back to STANDARD_BIN_COUNT, filling unused bins
# with the appropriate CDF FILLVAL before writing to output.

self.number_of_bins = (
l1b_data["number_of_bins_per_histogram"].data[0]
if len(l1b_data["number_of_bins_per_histogram"].data) != 0
else 0
)

self.number_of_bins = l1b_data["histogram"].shape[1]
self.raw_histograms = self.calculate_histogram_sums(l1b_data["histogram"].data)[
: self.number_of_bins
]

exposure_per_epoch = (
l1b_data["spin_period_average"].data
Expand All @@ -79,16 +98,15 @@ def __post_init__(self, l1b_data: xr.Dataset) -> None:
self.exposure_times = np.full(self.number_of_bins, np.sum(exposure_per_epoch))

raw_uncertainties = np.sqrt(self.raw_histograms)
self.photon_flux = np.zeros(len(self.raw_histograms))
self.flux_uncertainties = np.zeros(len(self.raw_histograms))
self.photon_flux = np.zeros(self.number_of_bins)
self.flux_uncertainties = np.zeros(self.number_of_bins)

# TODO: Only where exposure counts != 0
if len(self.exposure_times) != 0:
self.photon_flux = self.raw_histograms / self.exposure_times
self.flux_uncertainties = raw_uncertainties / self.exposure_times

# TODO: Average this, or should they all be the same?
self.spin_angle = np.average(l1b_data["imap_spin_angle_bin_cntr"].data, axis=0)
self.spin_angle = np.zeros(0)

# Apply 'OR' operation to histogram_flag_array across all
# good-time L1B blocks per bin.
Expand All @@ -107,6 +125,28 @@ def __post_init__(self, l1b_data: xr.Dataset) -> None:
self.ecliptic_lon = np.zeros(self.number_of_bins)
self.ecliptic_lat = np.zeros(self.number_of_bins)

if self.number_of_bins:
# imap_spin_angle_bin_cntr is the raw IMAP spin angle ψ (0 - 360°,
# bin midpoints).
spin_angle_bin_cntr = l1b_data["imap_spin_angle_bin_cntr"].data[0][
: self.number_of_bins
]
# Convert ψ → ψPA (Eq. 29): position angle measured from the
# northernmost point of the scanning circle.
self.spin_angle = (spin_angle_bin_cntr - position_angle + 360.0) % 360.0

# Roll all bin arrays so bin 0 corresponds to the northernmost
# point (minimum ψPA).
roll = -np.argmin(self.spin_angle)
self.spin_angle = np.roll(self.spin_angle, roll)
self.raw_histograms = np.roll(self.raw_histograms, roll)
self.photon_flux = np.roll(self.photon_flux, roll)
self.exposure_times = np.roll(self.exposure_times, roll)
self.flux_uncertainties = np.roll(self.flux_uncertainties, roll)
self.histogram_flag_array = np.roll(self.histogram_flag_array, roll)
self.ecliptic_lon = np.roll(self.ecliptic_lon, roll)
self.ecliptic_lat = np.roll(self.ecliptic_lat, roll)

@staticmethod
def calculate_histogram_sums(histograms: NDArray) -> NDArray:
"""
Expand All @@ -123,7 +163,8 @@ def calculate_histogram_sums(histograms: NDArray) -> NDArray:
Sum of valid histograms across all timestamps.
"""
histograms = histograms.copy()
histograms[histograms == -1] = 0
# Zero out areas where HISTOGRAM_FILLVAL (i.e. unused bins)
histograms[histograms == GlowsConstants.HISTOGRAM_FILLVAL] = 0
return np.sum(histograms, axis=0, dtype=np.int64)


Expand Down Expand Up @@ -213,8 +254,8 @@ class HistogramL2:
pulse_length_std_dev: np.ndarray[np.double]
spin_period_ground_average: np.ndarray[np.double]
spin_period_ground_std_dev: np.ndarray[np.double]
position_angle_offset_average: np.ndarray[np.double]
position_angle_offset_std_dev: np.ndarray[np.double]
position_angle_offset_average: np.double
position_angle_offset_std_dev: np.double
spin_axis_orientation_std_dev: np.ndarray[np.double]
spacecraft_location_average: np.ndarray[np.double]
spacecraft_location_std_dev: np.ndarray[np.double]
Expand Down Expand Up @@ -245,8 +286,6 @@ def __init__(self, l1b_dataset: xr.Dataset, pipeline_settings: PipelineSettings)
# TODO: filter bad bins out. Needs to happen here while everything is still
# per-timestamp.

self.daily_lightcurve = DailyLightcurve(good_data)

self.total_l1b_inputs = len(l1b_dataset["epoch"])
self.number_of_good_l1b_inputs = len(good_data["epoch"])
self.identifier = -1 # TODO: retrieve from spin table
Expand Down Expand Up @@ -299,16 +338,12 @@ def __init__(self, l1b_dataset: xr.Dataset, pipeline_settings: PipelineSettings)
self.spin_period_ground_std_dev = (
good_data["spin_period_ground_average"].std(dim="epoch", keepdims=True).data
)
self.position_angle_offset_average = (
good_data["position_angle_offset_average"]
.mean(dim="epoch", keepdims=True)
.data
)
self.position_angle_offset_std_dev = (
good_data["position_angle_offset_average"]
.std(dim="epoch", keepdims=True)
.data
)

position_angle = self.compute_position_angle()
self.position_angle_offset_average: np.double = np.double(position_angle)

# Always zero - per algorithm doc 10.6
self.position_angle_offset_std_dev = np.double(0.0)
self.spacecraft_location_average = (
good_data["spacecraft_location_average"]
.mean(dim="epoch")
Expand Down Expand Up @@ -340,6 +375,8 @@ def __init__(self, l1b_dataset: xr.Dataset, pipeline_settings: PipelineSettings)
.data[np.newaxis, :]
)

self.daily_lightcurve = DailyLightcurve(good_data, position_angle)

def filter_bad_bins(self, histograms: NDArray, bin_exclusions: NDArray) -> NDArray:
"""
Filter out bad bins from the histogram.
Expand Down Expand Up @@ -392,3 +429,28 @@ def return_good_times(flags: xr.DataArray, active_flags: NDArray) -> NDArray:
# where all the active indices == 1.
good_times = np.where(np.all(flags[:, active_flags == 1] == 1, axis=1))[0]
return good_times

def compute_position_angle(self) -> float:
    """
    Compute the GLOWS position angle from the instrument mounting geometry.

    The value is fixed by the physical mounting of the instrument, so it is
    identical for every L1B block (epoch value) and is not expected to change
    significantly over time.

    Returns
    -------
    float
        The GLOWS mounting position angle in degrees, in [0, 360).
    """
    # Algorithm doc 10.6 (Eq. 30): psi_G_eff = 360 - psi_GLOWS, where
    # psi_GLOWS is the azimuth of the GLOWS boresight in the IMAP
    # spacecraft frame, measured from the spacecraft x-axis. Because the
    # angle lives in the spinning IMAP frame and is set by the mounting
    # location, it is time-independent: it relates x=0 of the IMAP frame
    # to x=0 of the GLOWS instrument frame.
    # The uncertainty delta_psi_G_eff is taken as 0 per instrument-team
    # decision (i.e. no deviation from the SPICE-determined mounting angle).
    azimuth_deg, _elevation_deg = get_instrument_mounting_az_el(
        SpiceFrame.IMAP_GLOWS
    )
    return (360.0 - azimuth_deg) % 360.0
13 changes: 9 additions & 4 deletions imap_processing/tests/glows/test_glows_l2.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
glows_l2,
)
from imap_processing.glows.l2.glows_l2_data import DailyLightcurve, HistogramL2
from imap_processing.glows.utils.constants import GlowsConstants
from imap_processing.spice.time import et_to_datetime64, ttj2000ns_to_et
from imap_processing.tests.glows.conftest import mock_update_spice_parameters

Expand All @@ -24,17 +25,18 @@ def l1b_hists():
hist = xr.DataArray(
np.ones((4, 5)), dims=["epoch", "bins"], coords={"epoch": epoch, "bins": bins}
)
hist[1, 0] = -1
hist[2, 0] = -1
hist[1, 1] = -1
hist[2, 3] = -1
hist[1, 0] = GlowsConstants.HISTOGRAM_FILLVAL
hist[2, 0] = GlowsConstants.HISTOGRAM_FILLVAL
hist[1, 1] = GlowsConstants.HISTOGRAM_FILLVAL
hist[2, 3] = GlowsConstants.HISTOGRAM_FILLVAL

input = xr.Dataset(coords={"epoch": epoch, "bins": bins})
input["histogram"] = hist

return input


@patch.object(HistogramL2, "compute_position_angle", return_value=42.0)
@patch.object(
HistogramL1B,
"flag_uv_and_excluded",
Expand All @@ -44,6 +46,7 @@ def l1b_hists():
def test_glows_l2(
mock_spice_function,
mock_flag_uv_and_excluded,
mock_compute_position_angle,
l1a_dataset,
mock_ancillary_exclusions,
mock_pipeline_settings,
Expand Down Expand Up @@ -75,6 +78,7 @@ def test_glows_l2(
assert any(record.levelname == "WARNING" for record in caplog.records)


@patch.object(HistogramL2, "compute_position_angle", return_value=42.0)
@patch.object(
HistogramL1B,
"flag_uv_and_excluded",
Expand All @@ -84,6 +88,7 @@ def test_glows_l2(
def test_generate_l2(
mock_spice_function,
mock_flag_uv_and_excluded,
mock_compute_position_angle,
l1a_dataset,
mock_ancillary_exclusions,
mock_pipeline_settings,
Expand Down
Loading
Loading