Skip to content

Commit

Permalink
Completed molonglo upload scripts
Browse files Browse the repository at this point in the history
  • Loading branch information
NickSwainston committed Dec 14, 2023
1 parent 3fa51a5 commit 17a41ab
Show file tree
Hide file tree
Showing 6 changed files with 58 additions and 94 deletions.
16 changes: 16 additions & 0 deletions dev_scripts/generate_all_jsons_molonglo.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
#!/bin/bash

set -e

# For every Molonglo observation that has an obs.header but no meertime.json,
# generate the JSON and ingest it.  generate_meerkat_json exits with code 42
# when the observation should not be ingested, so only skip ingestion then.
for path in $(find /fred/oz002/ldunn/meertime_dataportal/data/post -type f -name "obs.header"); do
    # Directory containing the obs.header (suffix stripped from the path)
    obs_dir="${path%%obs.header}"
    if [ -e "${obs_dir}/meertime.json" ]; then
        echo "Skipping $path"
    else
        echo "Making meertime.json for $path"
        EXIT_CODE=0
        generate_meerkat_json "$path" -o "$obs_dir" || EXIT_CODE=$?
        if [ "$EXIT_CODE" -ne 42 ]; then
            ingest_obs "${obs_dir}/meertime.json"
        fi
    fi
done
2 changes: 1 addition & 1 deletion psrdb/load_data.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import os

# Directory holding data files bundled with the psrdb package.
DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
# Molonglo phasing records used as calibration metadata for MONS observations.
# (A previous duplicate assignment pointing at molonglo_calibrations.txt was
# dead code — it was immediately overwritten — and has been removed.)
MOLONGLO_CALIBRATIONS = os.path.join(DATA_DIR, "molonglo_phasing.txt")
27 changes: 1 addition & 26 deletions psrdb/scripts/generate_meerkat_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,8 @@
from decouple import config
from datetime import datetime

import psrchive as psr

from psrdb.utils import header
from psrdb.utils.upload import generate_obs_length, get_archive_ephemeris


CALIBRATIONS_DIR = config("CALIBRATIONS_DIR", "/fred/oz005/users/aparthas/reprocessing_MK/poln_calibration")
Expand All @@ -21,15 +20,6 @@
SEARCH_DIR = config("SEARCH_DIR", "/fred/oz005/search")


def generate_obs_length(archive):
    """Return the duration (seconds) of the observation stored in *archive*.

    The archive is loaded with psrchive, summed over all sub-integrations,
    and the duration of the single resulting integration is returned.
    """
    summed = psr.Archive_load(archive).total()
    return summed.get_first_Integration().get_duration()

def get_sf_length(sf_files):
"""
Determine the length of input sf files with the vap command
Expand Down Expand Up @@ -80,21 +70,6 @@ def get_calibration(utc_start):
raise RuntimeError(f"Could not find calibration file for utc_start={utc_start}")


def get_archive_ephemeris(freq_summed_archive):
    """
    Get the ephemeris from the archive file using the vap command.

    Parameters
    ----------
    freq_summed_archive : str
        Path of the frequency-summed archive file to query.

    Returns
    -------
    str
        The ephemeris text printed by ``vap -E``, with any leading
        newline characters removed.
    """
    comm = "vap -E {0}".format(freq_summed_archive)
    args = shlex.split(comm)
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    # communicate() drains stdout while waiting; wait()-then-read can
    # deadlock if vap's output fills the OS pipe buffer.
    stdout, _ = proc.communicate()
    ephemeris_text = stdout.decode("utf-8")

    # Remove any newline characters at the start of the output
    return ephemeris_text.lstrip('\n')

def main():
import argparse

Expand Down
74 changes: 7 additions & 67 deletions psrdb/scripts/generate_molonglo_json.py
Original file line number Diff line number Diff line change
@@ -1,66 +1,21 @@
#!/usr/bin/env python

import os
import sys
import glob
import json
import shlex
import logging
import subprocess
from decouple import config
from datetime import datetime, timezone

import psrchive as psr

from psrdb.utils import header
from psrdb.utils.upload import generate_obs_length, get_archive_ephemeris
from psrdb.load_data import MOLONGLO_CALIBRATIONS


RESULTS_DIR = config("RESULTS_DIR", "/fred/oz002/ldunn/meertime_dataportal/data/post")


def generate_obs_length(archive):
"""
Determine the length of the observation from the input archive file
"""

ar = psr.Archive_load(archive)
ar = ar.total()
return ar.get_first_Integration().get_duration()

def get_sf_length(sf_files):
    """
    Determine the total length (seconds) of the input sf files with the
    vap command.

    Parameters
    ----------
    sf_files : list of str
        Paths of the sf files to query.

    Returns
    -------
    float
        Sum of the per-file lengths reported by ``vap -c length``.
    """
    comm = f"vap -c length {' '.join(sf_files)}"
    args = shlex.split(comm)
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, text=True, bufsize=1)
    vap_lines = []
    try:
        # Echo and record the output line by line in real time
        for line in iter(proc.stdout.readline, ''):
            print(line, end='', flush=True)
            vap_lines.append(line)

    # Handle Ctrl+C: stop the subprocess instead of leaving it running
    except KeyboardInterrupt:
        logging.error("Process interrupted. Terminating...")
        proc.terminate()
        sys.exit(1)

    finally:
        # Wait for the subprocess to complete
        proc.wait()

    lengths = []
    # Skip vap's header line, then parse the length column of each row.
    for line in vap_lines[1:]:
        fields = line.split()
        # Rows from readline keep their trailing newline, so a "blank" row
        # is '\n', not '' — checking the field count guards against both
        # blank rows and rows missing the length column.
        if len(fields) < 2:
            continue
        lengths.append(float(fields[1]))
    return sum(lengths)
MOLONGLO_RESULTS_DIR = config("MOLONGLO_RESULTS_DIR", "/fred/oz002/ldunn/meertime_dataportal/data/post")


def get_calibration(utc_start):
utc_start_dt = datetime.strptime(utc_start, "%Y-%m-%d-%H:%M:%S")
utc_start_dt = datetime.strptime(utc_start, "%Y-%m-%d-%H:%M:%S").replace(tzinfo=timezone.utc)
with open(MOLONGLO_CALIBRATIONS) as f:
for line in f.readlines():
if line[0] == '#':
Expand All @@ -76,21 +31,6 @@ def get_calibration(utc_start):
return calibration


def get_archive_ephemeris(freq_summed_archive):
    """
    Get the ephemeris from the archive file using the vap command.

    Parameters
    ----------
    freq_summed_archive : str
        Path of the frequency-summed archive file to query.

    Returns
    -------
    str
        The ephemeris text printed by ``vap -E``, with any leading
        newline characters removed.
    """
    comm = "vap -E {0}".format(freq_summed_archive)
    args = shlex.split(comm)
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    # communicate() drains stdout while waiting; wait()-then-read can
    # deadlock if vap's output fills the OS pipe buffer.
    stdout, _ = proc.communicate()
    ephemeris_text = stdout.decode("utf-8")

    # Remove any newline characters at the start of the output
    return ephemeris_text.lstrip('\n')

def main():
import argparse

Expand Down Expand Up @@ -131,7 +71,7 @@ def main():
obs_data.parse()

# Find raw archive and frequency summed files
freq_summed_archive = f"{RESULTS_DIR}/{obs_data.source}/{obs_data.utc_start}/{obs_data.source}_{obs_data.utc_start}.FT"
freq_summed_archive = f"{MOLONGLO_RESULTS_DIR}/{obs_data.source}/{obs_data.utc_start}/{obs_data.source}_{obs_data.utc_start}.FT"
# Check if there are freq.sum and archive files
obs_length = generate_obs_length(freq_summed_archive)

Expand All @@ -141,8 +81,8 @@ def main():

meertime_dict = {
"pulsarName": obs_data.source,
"telescopeName": obs_data.telescope,
"projectCode": obs_data.proposal_id,
"telescopeName": "MONS",
"projectCode": "MONSPSR_TIMING",
"schedule_block_id": obs_data.schedule_block_id,
"cal_type": "pre",
"cal_location": cal_location,
Expand All @@ -151,7 +91,7 @@ def main():
"nchan": obs_data.nchan,
"beam": obs_data.beam,
"nant": obs_data.nant,
"nantEff": obs_data.nant_eff,
"nantEff": obs_data.nant,
"npol": obs_data.npol,
"obsType": obs_data.obs_type,
"utcStart": obs_data.utc_start,
Expand Down
32 changes: 32 additions & 0 deletions psrdb/utils/upload.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@

import shlex

import subprocess

import psrchive as psr


def generate_obs_length(archive):
    """
    Determine the length of the observation from the input archive file.

    Loads the archive, sums all sub-integrations into one, and returns
    the duration of that single integration in seconds.
    """
    loaded = psr.Archive_load(archive)
    loaded = loaded.total()
    first_integration = loaded.get_first_Integration()
    return first_integration.get_duration()


def get_archive_ephemeris(freq_summed_archive):
    """
    Get the ephemeris from the archive file using the vap command.

    Parameters
    ----------
    freq_summed_archive : str
        Path of the frequency-summed archive file to query.

    Returns
    -------
    str
        The ephemeris text printed by ``vap -E``, with any leading
        newline characters removed.
    """
    comm = "vap -E {0}".format(freq_summed_archive)
    args = shlex.split(comm)
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    # communicate() drains stdout while waiting; wait()-then-read can
    # deadlock if vap's output fills the OS pipe buffer.
    stdout, _ = proc.communicate()
    ephemeris_text = stdout.decode("utf-8")

    # Remove any newline characters at the start of the output
    return ephemeris_text.lstrip('\n')
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ description = "CLI for pulsars.org.au"
authors = ["GWDC"]
license = "MIT"
packages = [{include = "psrdb"}]
include = ["psrdb/data/molonglo_phasing.txt"]

[tool.poetry.scripts]
psrdb = "psrdb.scripts.psrdb:main"
Expand Down

0 comments on commit 17a41ab

Please sign in to comment.