Skip to content

Commit

Permalink
Merge pull request #144 from pgriffin17/maximTest
Browse files Browse the repository at this point in the history
Merging Philip's changes into my own dev branch for reference
  • Loading branch information
WWGolay authored Jan 23, 2024
2 parents bbb0d9d + 303617d commit 0263106
Show file tree
Hide file tree
Showing 18 changed files with 1,058 additions and 193 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ instance/

# Sphinx documentation
docs/_build/
docs/source/user_guide/observatory_info.csv

# PyBuilder
target/
Expand Down
4 changes: 2 additions & 2 deletions .readthedocs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ sphinx:
fail_on_warning: true

# Optionally build your docs in additional formats such as PDF and ePub
formats:
- pdf
# formats:
# - pdf

# Optional but recommended, declare the Python requirements required
# to build your documentation
Expand Down
11 changes: 11 additions & 0 deletions docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@
from packaging.version import parse
from sphinx_astropy.conf.v2 import *

sys.path.insert(0, pathlib.Path(__file__).parents[0].resolve().as_posix())

import headerCSVGenerator

sys.path.insert(0, pathlib.Path(__file__).parents[2].resolve().as_posix())

import pyscope
Expand Down Expand Up @@ -57,6 +61,13 @@

extensions = list(map(lambda x: x.replace("viewcode", "linkcode"), extensions))

# Generate CSV for header info
print("Generating CSV for header info...")
targetPath = os.path.join(
os.path.dirname(__file__), "user_guide", "observatory_info.csv"
)
headerCSVGenerator.HeaderCSVGenerator().generate_csv(targetPath)


def linkcode_resolve(domain, info):
"""
Expand Down
118 changes: 118 additions & 0 deletions docs/source/headerCSVGenerator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
import ast
import csv
import inspect
import re

from pyscope.observatory import Observatory


class HeaderCSVGenerator:
    """Generates a CSV file containing the header information for the Observatory class.

    The CSV file contains the following columns:
    - Header Key: The key of the header
    - Header Value: The value of the header
    - Header Description: The description of the header

    The CSV file is generated by parsing the Observatory class source for the ``info``
    dictionaries and then combining them into a master dictionary. The master
    dictionary is then output to a CSV file.
    """

    def get_info_dicts(self):
        """Extract and statically evaluate every ``info`` dictionary literal
        from the ``Observatory`` property getters.

        Returns
        -------
        list of dict
            One dictionary per ``*info*`` property, mapping header keys to
            ``(value, description)`` tuples.

        Raises
        ------
        IndexError
            If a matched property getter does not contain an
            ``info = {...}`` assignment.
        ValueError, SyntaxError
            If a cleaned-up dictionary string is still not a valid Python
            literal for :func:`ast.literal_eval`.
        """
        # Collect every data-descriptor property of Observatory whose name
        # contains "info" (e.g. camera_info, telescope_info, ...).
        descriptors = inspect.getmembers(
            Observatory, predicate=inspect.isdatadescriptor
        )
        info_props = [d for d in descriptors if "info" in d[0]]

        # Getter source text for each info property.
        source_list = [inspect.getsource(d[1].fget) for d in info_props]

        # Pull out the literal 'info = {...}' assignment from each getter.
        # NOTE(review): assumes each getter contains exactly one such block
        # with no nested braces inside it — IndexError otherwise.
        raw_dicts = [re.findall(r"info = {[^}]*}", src)[0] for src in source_list]

        cleaned = []
        for text in raw_dicts:
            # Drop the leading 'info = ' so only the dict literal remains.
            text = text[7:]
            # Quote references to self.<attr> so the text no longer refers
            # to runtime state (e.g. self.site_name -> "site_name").
            text = re.sub(r"self\.([a-zA-Z0-9_.\[\]]+)", r'"\1"', text)
            # Unwrap str(...) calls around the now-quoted names.
            text = re.sub(r"(str\(([a-zA-Z\"\_\.\[\]]+)\))", r"\2", text)
            # Replace multi-line '(\n ... ],' call expressions with None —
            # these were runtime computations that cannot be evaluated here.
            group = re.findall(r"\(\n\s+([\s\S]*?\],)", text)
            if group:
                text = text.replace(group[0], "None,")
            # Turn '...(sep="dms")' fragments into plain string literals by
            # quoting the whole fragment and blanking its inner quotes.
            text = re.sub(
                r"(\"\S+\(sep=\"dms\"\))",
                lambda m: '"' + m.group(1).replace('"', " ") + '"',
                text,
            )
            # Remove leftover ' % i' printf-style fragments before a ')'.
            text = re.sub(r"( \% i(?=\)))", "", text)
            # Quote any remaining 'not "..." is None' boolean expressions so
            # they become inert string placeholders.
            text = re.sub(
                r"(not \"\S+\" is None)",
                lambda m: '"' + m.group(1).replace('"', " ") + '"',
                text,
            )
            cleaned.append(text)

        # Each cleaned string is now a pure literal that ast can evaluate
        # safely (no code execution, unlike eval()).
        return [ast.literal_eval(text) for text in cleaned]

    def generate_csv(self, filename):
        """Write the combined Observatory header information to *filename* as CSV.

        Parameters
        ----------
        filename : str or path-like
            Destination path for the CSV file; overwritten if it exists.
        """
        # Merge all per-property dicts into one master dict; later
        # properties win on duplicate keys.
        master_dict = {}
        for info in self.get_info_dicts():
            master_dict.update(info)
        # Each value is a (value, description) tuple; one CSV row per key.
        with open(filename, "w", newline="") as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(["Header Key", "Header Value", "Header Description"])
            for key, value in master_dict.items():
                writer.writerow([key, value[0], value[1]])
15 changes: 15 additions & 0 deletions docs/source/user_guide/header.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
Header
======

This is a page listing all potential header keywords for an `Observatory` object.

.. Note::
This is auto-generated from the `info` dictionaries in the `Observatory` class. Some
of the information is not available for all observatories, and some header values may
contain nonsense due to the auto-generation script.

.. csv-table:: Sample Header
:file: observatory_info.csv
:widths: 4, 6, 10
:header-rows: 1

1 change: 1 addition & 0 deletions docs/source/user_guide/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ User Guide
:maxdepth: 2

examples
header
logging
config
help
154 changes: 154 additions & 0 deletions pyscope/config/test_observatory.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
[site]

site_name = Winer Observatory

instrument_name = Robert L. Mutel Telescope

instrument_description = 20-inch PlaneWave CDK

latitude = 31:39:56.08 # dd:mm:ss.s

longitude = -110:36:06.42 # dd:mm:ss.s

elevation = 1515.7 # meters

diameter = 0.508 # meters

focal_length = 3.454 # meters


[camera]

camera_driver = maxim

camera_ascom = False

camera_kwargs =

cooler_setpoint = -20 # Celsius

cooler_tolerance = 1 # Celsius

max_dimension = 4096 # pixels


[cover_calibrator]

cover_calibrator_driver = ip_cover_calibrator

cover_calibrator_ascom = False

cover_calibrator_kwargs = tcp_ip:192.168.2.22,tcp_port:2101,buffer_size:1024

cover_calibrator_alt = 30.09397

cover_calibrator_az = 86.96717


[dome]

dome_driver =

dome_ascom =

dome_kwargs =


[filter_wheel]

filter_wheel_driver = maxim

filter_wheel_ascom = False

filter_wheel_kwargs =

filters = L, 6, V, B, H, W, O, 1, I, X, G, R

filter_focus_offsets = 0, 1400, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
# comma-separated list of focus offsets (in counts) for each filter


[focuser]

focuser_driver = ASCOM.PWI3.Focuser

focuser_ascom = True

focuser_kwargs =


[observing_conditions]

observing_conditions_driver = html_observing_conditions

observing_conditions_ascom = False

observing_conditions_kwargs = url:https://winer.org/Site/Weather.php


[rotator]

rotator_driver =

rotator_ascom =

rotator_kwargs =

rotator_reverse =

rotator_min_angle =

rotator_max_angle =


[safety_monitor]

driver_0 = html_safety_monitor,False,url:https://winer.org/Site/Roof.php

driver_1 =

driver_2 =


[switch]

driver_0 =

driver_1 =

driver_2 =


[telescope]

telescope_driver = SiTech.Telescope

telescope_ascom = True

telescope_kwargs =

min_altitude = 21 # degrees

settle_time = 5


[autofocus]

autofocus_driver = pwi_autofocus

autofocus_kwargs =


[wcs]

driver_0 = maxim

driver_1 = astrometry_net_wcs

driver_2 =

[scheduling]

slew_rate = 2 # degrees per second

instrument_reconfiguration_times =
35 changes: 35 additions & 0 deletions pyscope/observatory/ascom_camera.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,41 @@ def HeatSinkTemperature(self):

@property
def ImageArray(self):
"""Return the image array as a numpy array of the correct data type and in
standard FITS orientation. \b
Return the image array as a numpy array of the correct data type. The
data type is determined by the MaxADU property. If the MaxADU property
is not defined, or if it is less than or equal to 65535, the data type
will be numpy.uint16. If the MaxADU property is greater than 65535, the
data type will be numpy.uint32.
.. Note::
The image array is returned in the standard FITS orientation, which
deviates from the ASCOM standard (see below).
The image array is returned in the standard FITS orientation, with the
rows and columns transposed (if `_DoTranspose` is `True`). This is the same orientation as the
astropy.io.fits package. This is done because the ASCOM standard
specifies that the image array should be returned with the first index
being the column and the second index being the row. This is the
opposite of the FITS standard, which specifies that the first index
should be the row and the second index should be the column. The
astropy.io.fits package follows the FITS standard, so the image array
returned by the pyscope ASCOM driver is transposed to match the FITS
standard.
Parameters
----------
None
Returns
-------
numpy.ndarray
The image array as a numpy array of the correct data type.
Rows and columns are transposed to match the FITS standard.
"""
logger.debug(f"ASCOMCamera.ImageArray property called")
img_array = self._device.ImageArray
# Convert to numpy array and check if it is the correct data type
Expand Down
Loading

0 comments on commit 0263106

Please sign in to comment.