The ACA plate scale was last calibrated in 2002. As of 2020 we have roughly a factor of 10 more star data available, and in particular the ACA housing temperature now covers the range up to +30 C, whereas in 2002 the data were all taken near 14 to 15 C.
The first task is to use Ska data products to assemble a table of suitable observed guide star data.
%matplotlib inline
import matplotlib.pyplot as plt
import matplotlib.style
matplotlib.style.use('bmh')
import os
import numpy as np
from pathlib import Path
import sys
sys.path.insert(0, '/Users/aldcroft/git/agasc')
import agasc
from astropy.table import Table
from Chandra.Time import DateTime
from mica.archive import asp_l1
from mica.stats import guide_stats
from Ska.engarchive import fetch_sci
from chandra_aca.transform import yagzag_to_radec, radec_to_yagzag, pixels_to_yagzag
from astropy.utils.console import ProgressBar
from Quaternion import Quat
from IPython.core.display import display
from ipywidgets import IntProgress
def get_good_stars():
    """Return a table of guide-star tracking stats with quality cuts applied.

    :returns: astropy Table of guide stars passing all cuts
    """
    gs = guide_stats.get_stats()
    # Exclude stars from observations flagged as especially problematic
    gs = gs[gs['known_bad'] == False]  # noqa: E712
    # Keep only the columns used downstream
    keep_cols = ['obsid', 'kalman_tstart', 'kalman_datestart',
                 'slot', 'agasc_id',
                 'aoacmag_mean', 'aoacyan_mean', 'aoaczan_mean',
                 'f_track', 'f_within_1']
    gs = gs[keep_cols]
    tstart_min = DateTime('2002-07-01').secs
    good = gs['kalman_tstart'] > tstart_min
    good &= gs['aoacmag_mean'] < 9.5
    # Tracked more than 95% of the interval
    good &= gs['f_track'] > 0.95
    # Tracked with a position within 1 arcsec of expected > 95% of the interval
    good &= gs['f_within_1'] > 0.95
    # Remove Engineering Request obsids (obsid >= 38000)
    good &= gs['obsid'] < 38000
    return Table(gs[good])
def update_ra_dec(gs):
    """Update 'ra' and 'dec' columns in-place using the latest AGASC (1.7).

    Uses proper-motion corrected positions evaluated at each star's
    kalman_tstart epoch.

    :param gs: guide star table (modified in-place)
    """
    agasc_stars = agasc.get_stars(ids=gs['agasc_id'], dates=gs['kalman_tstart'])
    for col, agasc_col in (('ra', 'RA_PMCORR'), ('dec', 'DEC_PMCORR')):
        gs[col] = agasc_stars[agasc_col]
class NoAspCentroidsError(ValueError):
    """Raised when no aspect L1 ACACENT (centroid) products exist for an obsid."""
    pass
def get_centroids(stars, cen_dt=15):
    """Get level-1 centroid data for each star in one obsid.

    :param stars: Table with guide star data for an obsid
    :param cen_dt: Seconds +/- median time to include centroids within
    :returns: Table with centroids, mid-time of data
    :raises NoAspCentroidsError: if no ACACENT products exist for the obsid
    """
    obsid = stars['obsid'][0]
    # Guard clause: fail fast if no aspect centroid products are available
    cen_files = asp_l1.get_files(obsid, content=['ACACENT'])
    if not cen_files:
        raise NoAspCentroidsError(f'Obsid {obsid} is absent from ACACENT')
    cens = Table.read(cen_files[-1])
    # Only include centroids +/- cen_dt seconds around the mid time of the
    # observation interval.  Record this time so it can be used for getting
    # telemetered temp data and quaternions.
    mid_time = cens['time'].mean()
    ok = ((cens['time'] >= mid_time - cen_dt)
          & (cens['time'] <= mid_time + cen_dt)
          & (cens['alg'] == 8)
          & (cens['status'] == 0))
    cens = cens[ok]
    # Keep only slots corresponding to the requested guide stars.
    # np.isin replaces the deprecated np.in1d (same semantics).
    ok = np.isin(cens['slot'], stars['slot'])
    cens = cens[ok]
    # Convert to arcsec (ang_y/ang_z presumably in degrees in the L1 product)
    cens['ang_y'] *= 3600
    cens['ang_z'] *= 3600
    return cens, mid_time
def calc_mid_val(times, vals, mid_time, val_clip=None, std_err_min=0.0, n_sigma=3, poly_deg=2):
    """Compute a smoothed estimate of ``vals`` at time ``mid_time``.

    This fits a polynomial of degree ``poly_deg`` to the data with one round
    of sigma-clipping at ``n_sigma`` times the RMS fit residual (floored at
    ``std_err_min``), then evaluates the re-fit polynomial at ``mid_time``.

    Testing: injected a point with a 5 arcsec error and one with a 1.5 arcsec
    error and confirmed that both are rejected and that the final mid value
    is very close to expected.

    :param times: times of values
    :param vals: values
    :param mid_time: time at which to evaluate the fitted value
    :param val_clip: if set, initial sanity clip of values this far from median
    :param std_err_min: minimum (floor) value for the RMS error used in clipping
    :param n_sigma: number of sigma for clipping
    :param poly_deg: degree of polynomial for fitting
    :returns: fitted value at ``mid_time``
    """
    val_median = np.median(vals)
    # Work in offsets from the median value and mid time for fit stability
    vals = vals - val_median
    times = times - mid_time
    # Optionally filter any values that are too far from the median
    if val_clip:
        ok = np.abs(vals) < val_clip
        vals = vals[ok]
        times = times[ok]
    # Fit a polynomial to data points and get predicted values at sample times
    coeff = np.polyfit(x=times, y=vals, deg=poly_deg)
    vals_fit = np.polyval(x=times, p=coeff)
    # One round of sigma-clipping with std_err floored at std_err_min.
    # Use <= (not <) so that a perfect fit (zero residuals with the default
    # std_err_min=0.0) keeps its points instead of rejecting everything and
    # crashing the re-fit on empty arrays.
    errs = vals - vals_fit
    std_err = max(np.sqrt(np.mean(errs ** 2)), std_err_min)
    ok = np.abs(errs) <= n_sigma * std_err
    vals = vals[ok]
    times = times[ok]
    # Re-fit using clipped points and return estimated value at mid_time
    # (t=0 in the shifted time frame), restoring the median offset
    coeff = np.polyfit(x=times, y=vals, deg=poly_deg)
    out = np.polyval(x=0, p=coeff)
    return out + val_median
def process_stars(stars):
    """Fill in the star_{yag,zag}, t_aca, and cent_{i,j} values for one obsid.

    ``stars`` is a table of guide stars for a single obsid and is updated
    in-place.
    """
    cens, mid_time = get_centroids(stars)
    q_att = get_q_att_mid(mid_time)
    stars['t_aca'] = get_t_housing_mid(mid_time)
    val_clip = 1.0  # pixels (~5 arcsec)
    std_err_min = 0.1 / 5  # 0.1 arcsec expressed in pixels
    for star in stars:
        # Smoothed/filtered estimate of pixel centroids at mid_time
        slot_cens = cens[cens['slot'] == star['slot']]
        for axis in ('i', 'j'):
            star[f'cent_{axis}'] = calc_mid_val(
                slot_cens['time'], slot_cens[f'cent_{axis}'], mid_time,
                val_clip, std_err_min)
        # Convert catalog ra/dec to yag/zag using the single mid-time attitude
        yag, zag = radec_to_yagzag(star['ra'], star['dec'], q_att)
        star['star_yag'] = yag
        star['star_zag'] = zag
def get_q_att_mid(mid_time):
    """Return the attitude quaternion at the first good sample after mid_time.

    Fetches the four AOATTQT components over a 20 sec window starting at
    mid_time, then interpolates with filter_bad=True and bad_union=True so
    that only times where all four are sampled survive.  Uses the first
    available sample.
    """
    msids = [f'AOATTQT{ii}' for ii in range(1, 5)]
    telem = fetch_sci.MSIDset(msids, mid_time, mid_time + 20)
    telem.interpolate(filter_bad=True, bad_union=True)
    return Quat([telem[msid].vals[0] for msid in msids])
def get_t_housing_mid(mid_time):
    """Return a smoothed ACA housing temperature estimate at ``mid_time``.

    Gets ACA temps for +/- 6 hours around the mid time of the observation,
    averages the three temperature MSIDs at each sample time, and fits a
    cubic with sigma-clipping (via ``calc_mid_val``) to evaluate the value
    at ``mid_time``.  AACH2T is not used since it's not in the pipeline.
    """
    msids = ['AACH1T', 'AAOTALT', 'AAOTASMT']
    dt = 3600 * 6  # +/- 6 hours in seconds
    aca_telem = fetch_sci.MSIDset(msids, mid_time - dt, mid_time + dt)
    aca_telem.interpolate(filter_bad=True, bad_union=True)
    # Mean of the temperature MSIDs at each sample time (np.mean over the
    # stacked arrays replaces the hard-coded sum / 3)
    t_aca = np.mean(np.vstack([aca_telem[msid].vals for msid in msids]), axis=0)
    return calc_mid_val(aca_telem.times, t_aca, mid_time,
                        val_clip=10, std_err_min=0.3, poly_deg=3)
def add_samples(stars_all, accept_fraction=0.01):
    """Fill in the star_{yag,zag}, t_aca, and cent_{i,j} values in ``stars_all``.

    This can be relatively slow (1-10 seconds per obsid depending on
    machine / network), so it allows for a crude way of randomly selecting
    only a fraction of possible stars, with a bias toward more recent
    observations.  One can re-run this routine to continue filling in data
    while playing with the data for other processing / fitting.  Results are
    checkpointed to stars_all.fits every 100 new obsids and once at the end.

    :param stars_all: table of stars grouped by obsid (updated in-place)
    :param accept_fraction: base probability of processing a candidate obsid
    """
    n_max = len(stars_all.groups)
    bar = IntProgress(min=0, max=n_max)  # instantiate the progress bar
    display(bar)
    n_accept = 0
    for ii, stars in enumerate(stars_all.groups):
        bar.value = ii
        obsid = stars['obsid'][0]
        # Bias sampling toward more recent observations (larger obsids)
        if obsid < 10000:
            accept = accept_fraction
        elif obsid < 15000:
            accept = accept_fraction * 2
        else:
            accept = accept_fraction * 3
        # Process only not-yet-filled (t_aca == -99.0) multi-star obsids,
        # randomly accepted at the computed fraction
        if len(stars) > 1 and stars['t_aca'][0] == -99.0 and np.random.uniform() < accept:
            try:
                process_stars(stars)
            except NoAspCentroidsError as err:
                print(str(err))
            else:
                # Checkpoint to file every 100 new samples
                n_accept += 1
                if n_accept % 100 == 0:
                    stars_all.write('stars_all.fits', overwrite=True)
        # (removed dead code: `if ii >= n_max: break` could never trigger
        # since enumerate yields ii in 0 .. n_max - 1)
    stars_all.write('stars_all.fits', overwrite=True)
stars_all_file = 'stars_all.fits'
if Path(stars_all_file).exists():
    # Resume from previously checkpointed results
    stars_all = Table.read(stars_all_file)
else:
    gs = get_good_stars()
    update_ra_dec(gs)
    # Add 5 placeholder columns that get filled in later.
    # BUGFIX: the original assigned these to `stars_all` before it existed
    # (NameError); they must be added to `gs`.
    for name in ('star_yag', 'star_zag', 't_aca', 'cent_i', 'cent_j'):
        gs[name] = -99.
    stars_all = gs
# Group by obsid in both paths so add_samples can iterate per-observation
# (a table read back from FITS is not grouped)
stars_all = stars_all.group_by('obsid')
# Show the stars in the first obsid in the sample
stars_all.groups[0]
obsid | kalman_tstart | kalman_datestart | slot | agasc_id | aoacmag_mean | aoacyan_mean | aoaczan_mean | f_track | f_within_1 | ra | dec | star_yag | star_zag | t_aca | cent_i | cent_j |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
int64 | float64 | bytes21 | int64 | int64 | float64 | float64 | float64 | float64 | float64 | float64 | float64 | float64 | float64 | float64 | float64 | float64 |
2519 | 162519980.317 | 2003:056:00:25:16.133 | 3 | 182985592 | 6.81500101089 | -749.396230767 | -863.871306691 | 1.0 | 0.999151518638 | 132.687997004 | 18.8321606551 | -99.0 | -99.0 | -99.0 | -99.0 | -99.0 |
2519 | 162519980.317 | 2003:056:00:25:16.133 | 4 | 182991352 | 8.85400772095 | -2145.82534339 | -1754.21378231 | 1.0 | 0.991713899358 | 132.561031045 | 19.276204758 | -99.0 | -99.0 | -99.0 | -99.0 | -99.0 |
2519 | 162519980.317 | 2003:056:00:25:16.133 | 6 | 182989392 | 9.26144313812 | 2367.95521243 | 606.898414156 | 0.999769923212 | 0.969152152008 | 132.82511688 | 17.8836924215 | -99.0 | -99.0 | -99.0 | -99.0 | -99.0 |
# Process a weighted ~2-6% random fraction of remaining obsids (checkpoints
# results to stars_all.fits as it goes)
add_samples(stars_all, accept_fraction=0.02)
IntProgress(value=0, max=14133)
Obsid 2881 is absent from ACACENT Obsid 17373 is absent from ACACENT Obsid 17375 is absent from ACACENT