Support CXOTIME_NOW environment variable #270

Merged · 3 commits · Jan 28, 2025 · changes shown from all commits
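The changes below make cheta fetches honor the CXOTIME_NOW environment variable when no stop time is given, so "now" can be pinned for reproducible queries. A minimal usage sketch, assuming only the behavior exercised by the new test in this PR (the MSID name and dates are illustrative):

import os

# Pin "now": an open-ended fetch should stop here instead of at the wall clock.
os.environ["CXOTIME_NOW"] = "2025:002:00:00:00"

from cheta import fetch

# No stop time given, so the query is expected to end at CXOTIME_NOW.
dat = fetch.Msid("tephin", "2025:001:12:00:00", stat="5min")
print(dat.datestop)  # expected to be no later than 2025:002:00:00:00.000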
7 changes: 2 additions & 5 deletions cheta/fetch.py
@@ -561,9 +561,7 @@ def __init__(self, msid, start=LAUNCH_DATE, stop=None, filter_bad=False, stat=No
start, stop = intervals[0][0], intervals[-1][1]

self.tstart = DateTime(start).secs
-self.tstop = (
-    DateTime(stop).secs if stop else DateTime(time.time(), format="unix").secs
-)
+self.tstop = DateTime(stop).secs
self.datestart = DateTime(self.tstart).date
self.datestop = DateTime(self.tstop).date
self.data_source = {}
@@ -2286,8 +2284,7 @@ def create_msid_data_gap(msid_obj: MSID, data_gap_spec: str):
start = CxoTime(args.start)
stop = CxoTime(args.stop)
logger.info(
f"Creating data gap for {msid_obj.MSID} "
f"from {start.date} to {stop.date}"
f"Creating data gap for {msid_obj.MSID} from {start.date} to {stop.date}"
)
i0, i1 = np.searchsorted(msid_obj.times, [start.secs, stop.secs])
for attr in msid_obj.colnames:
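The fetch.py change above drops the explicit time.time() fallback: with stop=None, DateTime(stop) already resolves to the current time, and the premise of this PR is that this "current time" follows CXOTIME_NOW when the variable is set. A small sketch of that assumed behavior (not part of the diff; the pinned date is illustrative):

import os
from Chandra.Time import DateTime

os.environ["CXOTIME_NOW"] = "2025:002:00:00:00"  # hypothetical pinned "now"

# Assumption (the premise of the simplified tstop handling above): with no stop
# time, DateTime(None) resolves to CXOTIME_NOW rather than the wall clock.
print(DateTime(None).date)  # expected: 2025:002:00:00:00.000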
7 changes: 7 additions & 0 deletions cheta/tests/test_fetch.py
@@ -367,6 +367,13 @@ def test_interpolate_times_raise():
dat.interpolate(10.0, times=[1, 2])


+def test_cxotime_now(monkeypatch):
+    monkeypatch.setenv("CXOTIME_NOW", "2025:002:00:00:00")
+    dat = fetch.Msid("tephin", "2025:001:12:00:00", stat="5min")
+    assert CxoTime(dat.times[-1]).date < "2025:002:00:00:00.000"
+    assert len(dat) == 132  # Matches number of 5 min intervals in 12 hours
+
+
def test_interpolate_times():
dat = fetch.MSIDset(
["aoattqt1", "aogyrct1", "aopcadmd"], "2008:002:21:48:00", "2008:002:21:50:00"
6 changes: 3 additions & 3 deletions cheta/update_archive.py
@@ -255,11 +255,11 @@ def main_loop():
create_content_dir()

if not os.path.exists(msid_files["colnames"].abs):
-logger.info(f'No colnames.pickle for {ft["content"]} - skipping')
+logger.info(f"No colnames.pickle for {ft['content']} - skipping")
continue

if not os.path.exists(fetch.msid_files["archfiles"].abs):
-logger.info(f'No archfiles.db3 for {ft["content"]} - skipping')
+logger.info(f"No archfiles.db3 for {ft['content']} - skipping")
continue

# Column names for stats updates (without TIME, MJF, MNF, TLM_FMT)
@@ -848,7 +848,7 @@ def truncate_archive(filetype, date):
"""Truncate msid and statfiles for every archive file after date (to nearest
year:doy)
"""
-logger.info(f'Truncating {filetype["content"]} full and stat files after {date}')
+logger.info(f"Truncating {filetype['content']} full and stat files after {date}")
colnames = pickle.load(open(msid_files["colnames"].abs, "rb"))

date = DateTime(date).date
16 changes: 8 additions & 8 deletions cheta/update_client_archive.py
@@ -509,7 +509,7 @@ def as_python(val):
vals = {
name: as_python(archfile[name]) for name in archfile.dtype.names
}
-logger.debug(f'Inserting {vals["filename"]}')
+logger.debug(f"Inserting {vals['filename']}")
if not opt.dry_run:
try:
db.insert(vals, "archfiles")
@@ -754,7 +754,7 @@ def append_stat_col(dat, stat_file, msid, date_id, opt, logger):
vals = {key: dat[f"{msid}.{key}"] for key in ("data", "row0", "row1")}
logger.debug(
f"append_stat_col msid={msid} date_id={date_id}, "
-f'row0,1 = {vals["row0"]} {vals["row1"]}'
+f"row0,1 = {vals['row0']} {vals['row1']}"
)

mode = "r" if opt.dry_run else "a"
@@ -765,7 +765,7 @@ def append_stat_col(dat, stat_file, msid, date_id, opt, logger):
if vals["row1"] - 1 <= last_row_idx:
logger.debug(
f"Skipping {date_id} for {msid}: no new data "
-f'row1={vals["row1"]} last_row_idx={last_row_idx}'
+f"row1={vals['row1']} last_row_idx={last_row_idx}"
)
return

@@ -780,14 +780,14 @@ def append_stat_col(dat, stat_file, msid, date_id, opt, logger):
if vals["row0"] != len(h5.root.data):
raise RowMismatchError(
f"ERROR: unexpected discontinuity for stat msid={msid} "
-f'content={fetch.ft["content"]}\n'
+f"content={fetch.ft['content']}\n"
"Looks like your archive is in a bad state, CONTACT "
"your local Ska expert with this info:\n"
-f' First row0 in new data {vals["row0"]} != '
+f" First row0 in new data {vals['row0']} != "
f"length of existing data {len(h5.root.data)}"
)

-logger.debug(f'Appending {len(vals["data"])} rows to {stat_file}')
+logger.debug(f"Appending {len(vals['data'])} rows to {stat_file}")
if not opt.dry_run:
h5.root.data.append(vals["data"])

@@ -873,10 +873,10 @@ def append_h5_col(opt, msid, vals, logger, msid_files):
if vals["row0"] != len(h5.root.data):
raise RowMismatchError(
f"ERROR: unexpected discontinuity for full msid={msid} "
-f'content={fetch.ft["content"]}\n'
+f"content={fetch.ft['content']}\n"
"Looks like your archive is in a bad state, CONTACT "
"your local Ska expert with this info:\n"
-f' First row0 in new data {vals["row0"]} != '
+f" First row0 in new data {vals['row0']} != "
f"length of existing data {len(h5.root.data)}"
)

8 changes: 4 additions & 4 deletions cheta/update_server_sync.py
@@ -236,7 +236,7 @@ def check_index_tbl_consistency(index_tbl):

for idx, row0, row1 in zip(count(), index_tbl[:-1], index_tbl[1:]):
if row0["row1"] != row1["row0"]:
-msg = f'rows not contiguous at table date0={index_tbl["date_id"][idx]}'
+msg = f"rows not contiguous at table date0={index_tbl['date_id'][idx]}"
return msg

# No problems
@@ -308,7 +308,7 @@ def update_index_file(index_file, opt, logger):
break

if not rows:
-logger.info(f'No updates available for content {fetch.ft["content"]}')
+logger.info(f"No updates available for content {fetch.ft['content']}")
return index_tbl

# Create table from scratch or add new rows. In normal processing there
@@ -367,8 +367,8 @@ def update_sync_data_full(content, logger, row):
with DBI(dbi="sqlite", server=fetch.msid_files["archfiles"].abs) as dbi:
query = (
"select * from archfiles "
-f'where filetime >= {row["filetime0"]} '
-f'and filetime <= {row["filetime1"]} '
+f"where filetime >= {row['filetime0']} "
+f"and filetime <= {row['filetime1']} "
"order by filetime "
)
archfiles = dbi.fetchall(query)