Further work on the molonglo ingest
NickSwainston committed Dec 8, 2023
1 parent 37a68cf commit 3fa51a5
Showing 4 changed files with 109 additions and 41 deletions.
82 changes: 82 additions & 0 deletions psrdb/data/molonglo_phasing.txt
@@ -0,0 +1,82 @@
### log of phase/delay applications
### ls -la --time-style=long-iso -rt | awk '{print $6, $8}' > phasing.log
2021-01-19 2021.01.19.txt
2021-01-20 2021.01.20.txt
2021-01-28 2021.01.23.txt
2021-01-28 2021.01.28.txt
2021-02-22 2021.02.22.txt
2021-02-27 2021.02.26.txt
2021-03-02 2021.03.02.txt
2021-03-03 2021.03.03.txt
2021-03-05 2021.03.05.txt
2021-03-09 2021.03.09.txt
2021-03-10 2021.03.10.txt
2021-03-12 2021.03.11.txt
2021-03-15 2021.03.14.txt
2021-03-22 2021.03.19.txt
2021-03-22 2021.03.21.txt
2021-03-25 2021.03.24.txt
2021-03-26 2021.03.25.txt
2021-03-30 2021.03.26.txt
2021-04-22 xcor-2021-04-21-CJ1935-4620_without_changes.txt
2021-04-28 xcor-2021-04-21-CJ1935-4620_without_changes
2021-05-02 2020.04.21-tilt-offset.txt
2021-05-06 cable_delays_J1935_2021-04-20_tilt_0.0035_+0.35.txt
2021-05-10 2021.05.10.txt
2021-05-18 xcor-2021-05-18-CJ0408-6545.txt
2021-05-20 xcor-2021-05-20-CJ0408-6545.txt
2021-05-25 2021.05.23.txt
2021-05-31 2021.05.31.txt
2021-06-01 xcor-2021-05-31-CJ0408-6545.txt
2021-06-09 2021.05.31.northoffset_60deg.txt
2021-06-25 xcor-2021-06-24-CJ1935-4620.txt
2021-07-10 xcor-2021-07-09-CJ1935-4620.txt
2021-07-23 xcor-2021-07-22-CJ1935-4620.txt
2021-08-14 xcor-2021-08-12-CJ1935-4620.txt
2021-09-08 xcor-2021-09-07-CJ0408-6545.txt
2021-09-09 xcor-2021-09-08-CJ1935-4620.txt
2021-09-30 xcor-2021-09-28-CJ1935-4620.txt
2021-10-20 xcor-2021-10-15-CJ1935-4620.txt
2021-11-03 xcor-2021-11-02-CJ1935-4620.txt
2021-11-18 xcor-2021-11-17-CJ0408-6545.txt
2021-11-20 xcor-2021-11-20-CJ1935-4620.txt
2021-11-22 xcor-2021-11-19-CJ1935-4620.txt
2021-12-01 xcor-2021-12-01-CJ1935-4620.txt
2021-12-09 xcor-2021-12-12-CJ1935-4620.txt
2021-12-17 xcor-2021-12-01-CJ1935-4620_del_20_mod_10.txt
2022-01-06 xcor-2021-07-20-CJ1935-4620.txt
2022-01-10 xcor-2022-01-10-CJ1935-4620.txt
2022-01-11 xcor-2022-01-10-CJ0408-6545.txt
2022-01-25 xcor-2022-01-25-CJ1935-4620.txt
2022-02-05 xcor-2022-02-03-CJ0408-6545.txt
2022-02-14 xcor-2022-02-09-CJ1935-4620.txt
2022-02-15 cable_delays_CJ1935_22-02-09.txt
2022-02-16 xcor-2022-02-10-CJ0408-6545.txt
2022-03-28 CJ1935_2022_03_25.txt
2022-04-21 CJ0408_2022_04_21.txt
2022-04-22 CJ1935-4620_2022_04_21.txt
2022-04-26 CJ1935-4620_2022_04_25.txt
2022-04-29 CJ1935-4620_2022_04_26.txt
2022-05-02 CJ0408-6545_2022_05_02.txt
2022-05-09 CJ1935-4620_2022_05_04.txt
2022-05-10 CJ1935-4620_2022_05_09.txt
2022-06-08 xcor-2022-06-07-CJ1935-4620.txt
2022-07-03 xcor-2022-06-30-CJ1935-4620.txt
2022-07-29 CJ1935-4620_2022_07_29.txt
2022-08-08 CJ1935-4620_2022_08_08.txt
2022-08-19 CJ1935-4620_2022_08_19.txt
2022-09-10 CJ1935-4620_2022_09_10.txt
2022-09-22 CJ1935-4620_2022_09_22.txt
2022-10-02 CJ1935-4620_2022_10_01.txt
2022-10-07 CJ1935-4620_2022_10_07.txt
2022-11-03 CJ1935-4620_2022_11_03.txt
2023-01-11 CJ0408-6545_2023_01_11.txt
2023-01-25 CJ0408-6545_2023_01_25.txt
2023-02-03 CJ0408-6545_2023_02_03.txt
2023-03-07 xcor-2023-03-07-CJ0408-6545.txt
2023-03-30 xcor-2023-03-30-CJ1935-4620.txt
2023-04-06 xcor-2023-04-06-CJ1935-4620.txt
2023-05-01 xcor-2023-05-01-CJ0408-6545.txt
2023-05-06 xcor-2023-05-06-CJ1935-4620.txt
2023-06-06 xcor-2023-06-06-CJ1935-4620.txt
2023-06-28 xcor-2023-06-28-CJ1935-4620.txt
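
For reference, the two columns above are the date a phasing/delay solution was applied and the file it came from. Below is a minimal Python sketch of reading this log back into (application date, filename) pairs; it assumes only the space-separated, ###-commented format shown above, and the function name is illustrative.

from datetime import datetime, timezone

def read_phasing_log(path):
    """Parse a Molonglo phasing log into (application_date, filename) tuples."""
    entries = []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):
                continue  # skip the ### header comments and any blank lines
            date_str, filename = line.split(" ", 1)
            date = datetime.strptime(date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc)
            entries.append((date, filename.strip()))
    return entries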
4 changes: 4 additions & 0 deletions psrdb/load_data.py
@@ -0,0 +1,4 @@
import os

DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
MOLONGLO_CALIBRATIONS = os.path.join(DATA_DIR, "molonglo_calibrations.txt")
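
This new module only centralises data-file paths so other psrdb code can find the calibration log without hard-coding a location. A quick usage sketch (assuming psrdb is importable; the printed paths depend on where the package is installed):

from psrdb.load_data import DATA_DIR, MOLONGLO_CALIBRATIONS

# Both constants resolve relative to the psrdb package directory.
print(DATA_DIR)                # e.g. .../psrdb/data
print(MOLONGLO_CALIBRATIONS)   # e.g. .../psrdb/data/molonglo_calibrations.txt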
60 changes: 19 additions & 41 deletions psrdb/scripts/generate_molonglo_json.py
@@ -8,17 +8,15 @@
 import logging
 import subprocess
 from decouple import config
-from datetime import datetime
+from datetime import datetime, timezone

-# import psrchive as psr
+import psrchive as psr

 from psrdb.utils import header
+from psrdb.load_data import MOLONGLO_CALIBRATIONS


-CALIBRATIONS_DIR = config("CALIBRATIONS_DIR", "/fred/oz005/users/aparthas/reprocessing_MK/poln_calibration")
-RESULTS_DIR = config("RESULTS_DIR", "/fred/oz005/kronos")
-FOLDING_DIR = config("FOLDING_DIR", "/fred/oz005/timing")
-SEARCH_DIR = config("SEARCH_DIR", "/fred/oz005/search")
+RESULTS_DIR = config("RESULTS_DIR", "/fred/oz002/ldunn/meertime_dataportal/data/post")


 def generate_obs_length(archive):
@@ -63,21 +61,19 @@ def get_sf_length(sf_files):

 def get_calibration(utc_start):
     utc_start_dt = datetime.strptime(utc_start, "%Y-%m-%d-%H:%M:%S")
-    auto_cal_epoch = "2020-04-04-00:00:00"
-    auto_cal_epoch_dt = datetime.strptime(auto_cal_epoch, "%Y-%m-%d-%H:%M:%S")
+    with open(MOLONGLO_CALIBRATIONS) as f:
+        for line in f.readlines():
+            if line[0] == '#':
+                continue

-    if utc_start_dt > auto_cal_epoch_dt:
-        return ("pre", None)
+            date = utc_start_dt.strptime(line.split(' ')[0], '%Y-%m-%d').replace(tzinfo=timezone.utc)
+            delta = date - utc_start_dt
+            if delta.total_seconds() > 0:
+                break

-    cals = sorted(glob.glob(f"{CALIBRATIONS_DIR}/*.jones"), reverse=True)
-    for cal in cals:
-        cal_file = os.path.basename(cal)
-        cal_epoch = cal_file.rstrip(".jones")
-        cal_epoch_dt = datetime.strptime(cal_epoch, "%Y-%m-%d-%H:%M:%S")
-        if cal_epoch_dt < utc_start_dt:
-            return ("post", cal)
+    calibration = line.split(' ')[1].strip()

-    raise RuntimeError(f"Could not find calibration file for utc_start={utc_start}")
+    return calibration


 def get_archive_ephemeris(freq_summed_archive):
@@ -135,29 +131,11 @@ def main():
     obs_data.parse()

     # Find raw archive and frequency summed files
-    freq_summed_archive = f"{RESULTS_DIR}/{args.beam}/{obs_data.utc_start}/{obs_data.source}/freq.sum"
-    if obs_data.obs_type == "fold":
-        archive_files = glob.glob(f"{FOLDING_DIR}/{obs_data.source}/{obs_data.utc_start}/{args.beam}/*/*.ar")
-    elif obs_data.obs_type == "search":
-        archive_files = glob.glob(f"{SEARCH_DIR}/{obs_data.source}/{obs_data.utc_start}/{args.beam}/*/*.sf")
-    if obs_data.obs_type != "cal":
-        if not os.path.exists(freq_summed_archive) and not archive_files:
-            logging.error(f"Could not find freq.sum and archive files for {obs_data.source} {obs_data.utc_start} {args.beam}")
-            sys.exit(42)
-
-
-    # Check if ther are freq.sum and archive files
-    if obs_data.obs_type == "cal":
-        obs_length = -1
-    elif not os.path.exists(freq_summed_archive):
-        logging.warning(f"Could not find freq.sum file for {obs_data.source} {obs_data.utc_start}")
-        logging.warning("Finding observation length from archive files (This may take a while)")
-        obs_length = get_sf_length(archive_files)
-    else:
-        # Grab observation length from the frequency summed archive
-        obs_length = generate_obs_length(freq_summed_archive)
+    freq_summed_archive = f"{RESULTS_DIR}/{obs_data.source}/{obs_data.utc_start}/{obs_data.source}_{obs_data.utc_start}.FT"
+    # Check if there are freq.sum and archive files
+    obs_length = generate_obs_length(freq_summed_archive)

-    cal_type, cal_location = get_calibration(obs_data.utc_start)
+    cal_location = get_calibration(obs_data.utc_start)

     ephemeris_text = get_archive_ephemeris(freq_summed_archive)

@@ -166,7 +144,7 @@ def main():
         "telescopeName": obs_data.telescope,
         "projectCode": obs_data.proposal_id,
         "schedule_block_id": obs_data.schedule_block_id,
-        "cal_type": cal_type,
+        "cal_type": "pre",
         "cal_location": cal_location,
         "frequency": obs_data.frequency,
         "bandwidth": obs_data.bandwidth,
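
To make the calibration lookup above concrete: get_calibration now scans the calibration log for the first entry dated after the observation's UTC start and returns that line's filename (falling back to the last entry when the observation postdates every line). A standalone sketch of that lookup is below; it keeps both datetimes timezone-aware for the subtraction, and the function name and None fallback are illustrative rather than part of the committed code.

from datetime import datetime, timezone

from psrdb.load_data import MOLONGLO_CALIBRATIONS

def find_calibration(utc_start):
    """Return the calibration file from the first log entry dated after utc_start
    (or the last entry if none is later)."""
    utc_start_dt = datetime.strptime(utc_start, "%Y-%m-%d-%H:%M:%S").replace(tzinfo=timezone.utc)
    calibration = None
    with open(MOLONGLO_CALIBRATIONS) as f:
        for line in f:
            if not line.strip() or line.startswith("#"):
                continue
            date_str, calibration = line.split(" ", 1)
            date = datetime.strptime(date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc)
            if date > utc_start_dt:
                break
    return calibration.strip() if calibration else None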
4 changes: 4 additions & 0 deletions psrdb/utils/header.py
@@ -36,6 +36,10 @@ def parse(self):
         self.source = self.cfg["SOURCE"]
         self.utc_start = self.cfg["UTC_START"]
         self.telescope = self.cfg["TELESCOPE"]
+        if "BEAM" in self.cfg.keys():
+            self.beam = self.cfg["BEAM"]
+        else:
+            self.beam = None
         if "DELAYCAL_ID" in self.cfg.keys():
             self.delaycal_id = self.cfg["DELAYCAL_ID"]
         else:
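
The BEAM handling added here follows the same present-or-None pattern already used for DELAYCAL_ID. For illustration only (the committed code keeps the explicit if/else), the same behaviour can be expressed with dict.get:

def read_optional(cfg, key):
    """Return cfg[key] when the key is present, otherwise None."""
    return cfg.get(key)

# Hypothetical header config without a BEAM entry:
cfg = {"SOURCE": "J0437-4715", "UTC_START": "2023-12-08-00:00:00"}
print(read_optional(cfg, "BEAM"))  # prints: None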
