diff --git a/.gitignore b/.gitignore index 5a992e510..436f1f987 100644 --- a/.gitignore +++ b/.gitignore @@ -18,12 +18,13 @@ exoctk/__pycache__ *.c # Other generated files -*/_version.py */cython_version.py htmlcov .coverage MANIFEST .ipynb_checkpoints +contam_tool.log +soss_contam.h5 # Sphinx docs/api diff --git a/docker/.env b/docker/.env new file mode 100644 index 000000000..456ab08a4 --- /dev/null +++ b/docker/.env @@ -0,0 +1,12 @@ +# set the HAS_EXO variable depending on whether EXOCTK_DATA is defined. +# HAS_EXO=${${EXOCTK_DATA+copy}:-download} (doesn't work?) +HAS_EXO=${EXOCTK_DATA+copy} +HAS_EXO=${HAS_EXO:-download} + +# set the HAS_PANDEIA variable depending on whether pandeia_refdata is defined. +HAS_PANDEIA=${pandeia_refdata+copy} +HAS_PANDEIA=${HAS_PANDEIA:-download} + +# set the HAS_SYN variable depending on whether PYSYN_CDBS is defined. +HAS_SYN=${PYSYN_CDBS+copy} +HAS_SYN=${HAS_SYN:-download} diff --git a/docker/celery.yaml b/docker/celery.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/docker/compose.yaml b/docker/compose.yaml new file mode 100644 index 000000000..8a49e0fe1 --- /dev/null +++ b/docker/compose.yaml @@ -0,0 +1,56 @@ +include: + - redis.yaml +# - celery.yaml +services: + # Data Store Images + exoctk: + build: + context: ./exoctk + dockerfile: Dockerfile + args: + - exoctk_version=v1.2.6.3 + - python_version=3.11 + - exoctk_data=$EXOCTK_DATA + - has_exo=$HAS_EXO + volumes: + - exoctk_data:/exoctk/exoctk_data + ports: + - "5000:5000" + restart: unless-stopped + pandexo: + build: + context: ./pandexo + dockerfile: Dockerfile + args: + - pandexo_version=3.0.0 + - python_version=3.11 + - pandeia_version=3.2 + - has_exo=$HAS_EXO + - has_pandeia=$HAS_PANDEIA + - has_syn=$HAS_SYN + volumes: + - exoctk_data:/data/exoctk + - pandeia_data:/data/pandeia + - synphot_data:/data/trds + ports: + - "1111:1111" + restart: unless-stopped +volumes: + exoctk_data: + driver: local + driver_opts: + type: none + o: bind + device: 
$EXOCTK_DATA + pandeia_data: + driver: local + driver_opts: + type: none + o: bind + device: $pandeia_refdata + synphot_data: + driver: local + driver_opts: + type: none + o: bind + device: $PYSYN_CDBS diff --git a/docker/dockerfiles/DataDockerfileExoctk b/docker/dockerfiles/DataDockerfileExoctk new file mode 100644 index 000000000..d5e2b5bcf --- /dev/null +++ b/docker/dockerfiles/DataDockerfileExoctk @@ -0,0 +1,39 @@ +# Create Container +ARG has_exo +FROM alpine AS base + +# Define how to load exoctk data if a copy is already present locally +FROM base AS copy +WORKDIR /tmp/data +ADD . /tmp/data/ + +# Define how to load exoctk data by download +FROM base AS download +WORKDIR /tmp/data +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/modelgrid_ACES_1.tar.gz modelgrid_ACES_1.tar.gz +RUN tar zxf modelgrid_ACES_1.tar.gz +RUN mv modelgrid.ACES_1 modelgrid +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/modelgrid_ACES_2.tar.gz modelgrid_ACES_2.tar.gz +RUN tar zxf modelgrid_ACES_2.tar.gz +RUN mv modelgrid.ACES_2/* ./modelgrid/ +WORKDIR /tmp/data/modelgrid +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/modelgrid_ATLAS9.tar.gz modelgrid_ATLAS9.tar.gz +RUN tar zxf modelgrid_ATLAS9.tar.gz +RUN mv modelgrid.ATLAS9 ATLAS9 +WORKDIR /tmp/data +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/generic.tar.gz generic.tar.gz +RUN tar zxf generic.tar.gz +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/fortney.tar.gz fortney.tar.gz +RUN tar zxf fortney.tar.gz +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/groups_integrations.tar.gz groups_integrations.tar.gz +RUN tar zxf groups_integrations.tar.gz +ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/exoctk_contam.tar.gz exoctk_contam.tar.gz +RUN tar zxf exoctk_contam.tar.gz + +# And now combine together and do whichever one the environment tells us to +FROM ${has_exo} AS final + +RUN mv /tmp/data /data 
+WORKDIR /data +CMD ["/bin/sh", "-c", "echo 'Built from ${has_exo}'"] +# CMD ["tail", "-f", "/dev/null"] diff --git a/docker/dockerfiles/DataDockerfilePandeia b/docker/dockerfiles/DataDockerfilePandeia new file mode 100644 index 000000000..57e4d611e --- /dev/null +++ b/docker/dockerfiles/DataDockerfilePandeia @@ -0,0 +1,22 @@ +# Create Container +ARG has_pandeia +FROM alpine AS base + +# Define how to load pandeia data if a copy is already present locally +FROM base AS copy +WORKDIR /tmp/data +ADD . /tmp/data/ + +# Define how to load pandeia data by download +FROM base AS download +WORKDIR /tmp/data +# ADD https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/fortney.tar.gz fortney.tar.gz +# RUN tar zxf fortney.tar.gz + +# And now combine together and do whichever one the environment tells us to +FROM ${has_pandeia} AS final + +RUN mv /tmp/data /data +WORKDIR /data +CMD ["/bin/sh", "-c", "echo 'Built from ${has_pandeia}'"] +# CMD ["tail", "-f", "/dev/null"] diff --git a/docker/dockerfiles/DataDockerfileSynphot b/docker/dockerfiles/DataDockerfileSynphot new file mode 100644 index 000000000..d108d527a --- /dev/null +++ b/docker/dockerfiles/DataDockerfileSynphot @@ -0,0 +1,25 @@ +# Create Container +ARG has_syn +FROM alpine AS base + +# Define how to load synphot data if a copy is already present locally +FROM base AS copy +WORKDIR /tmp/data +ADD . 
/tmp/data/ + +# Define how to load synphot data by download +FROM base AS download +WORKDIR /tmp/data +ADD https://archive.stsci.edu/hlsps/reference-atlases/hlsp_reference-atlases_hst_multi_pheonix-models_multi_v3_synphot5.tar pysyn_data.tar +RUN tar xf pysyn_data.tar +ADD https://archive.stsci.edu/hlsps/reference-atlases/hlsp_reference-atlases_hst_multi_everything_multi_v11_sed.tar pysyn_norm.tar +RUN tar xf pysyn_norm.tar +RUN cp -r /tmp/data/grp/redcat/trds /tmp/data + +# And now combine together and do whichever one the environment tells us to +FROM ${has_syn} AS final + +RUN mv /tmp/data /data +WORKDIR /data +CMD ["/bin/sh", "-c", "echo 'Built from ${has_syn}'"] +# CMD ["tail", "-f", "/dev/null"] diff --git a/docker/exoctk/Dockerfile b/docker/exoctk/Dockerfile new file mode 100644 index 000000000..112c4ec58 --- /dev/null +++ b/docker/exoctk/Dockerfile @@ -0,0 +1,49 @@ +# syntax=docker/dockerfile:1 + +# Get python version +ARG python_version + +# Create Container +FROM python:${python_version}-alpine + +# Get remaining arguments +ARG exoctk_version + +# set up working directory +WORKDIR /exoctk + +# Set up container environment +ENV EXOCTK_DATA="/exoctk/exoctk_data/" +ENV MODELGRID_DIR="/exoctk/exoctk_data/modelgrid/" +ENV EXOCTK_VERSION=${exoctk_version} + +# Install necessary base software +RUN apk update +RUN echo "http://dl-8.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories +RUN apk --no-cache --update-cache add gcc gfortran build-base wget freetype-dev libpng-dev openblas-dev git hdf5-dev + +# Install numpy with custom batman +WORKDIR /exoctk +RUN pip install --force-reinstall --no-cache-dir numpy==2.2.6 +RUN pip uninstall -y batman-package +RUN git clone https://github.com/lkreidberg/batman.git +WORKDIR /exoctk/batman +RUN pip install . 
+WORKDIR /exoctk + +# Install very specific versions of packages +RUN pip install svo-filters==0.4.4 + +# Clone exoctk repository +RUN git clone https://github.com/ExoCTK/exoctk.git +WORKDIR /exoctk/exoctk +RUN git checkout $EXOCTK_VERSION +RUN pip install -e . + +# Set up the port that exoctk will listen on +EXPOSE 5000 + +# Run exoctk +WORKDIR /exoctk +# CMD ["tail", "-f", "/dev/null"] +CMD ["python", "/exoctk/exoctk/exoctk/exoctk_app/app_exoctk.py"] diff --git a/docker/pandexo/Dockerfile b/docker/pandexo/Dockerfile new file mode 100644 index 000000000..600e49962 --- /dev/null +++ b/docker/pandexo/Dockerfile @@ -0,0 +1,49 @@ +# syntax=docker/dockerfile:1 + +# Get python version and create final container +ARG python_version +FROM python:${python_version}-alpine + +ENV FORTGRID_DIR=/data/exoctk/fortney +ENV pandeia_refdata=/data/pandeia +ENV PYSYN_CDBS=/data/trds + +# Get remaining arguments +ARG pandexo_version +ARG pandeia_version + +# set up working directory +WORKDIR /pandexo + +# Set up container environment +ENV PANDEXO_VERSION=${pandexo_version} +ENV PANDEIA_VERSION=${pandeia_version} + +# Install necessary base software +RUN apk update +RUN echo "http://dl-8.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories +RUN apk --no-cache --update-cache add gcc gfortran build-base wget freetype-dev libpng-dev openblas-dev hdf5-dev git +RUN apk --no-cache --update-cache add linux-headers + +# Install numpy with custom batman +RUN pip install --force-reinstall --no-cache-dir numpy==2.2.6 +RUN pip uninstall -y batman-package +RUN git clone https://github.com/lkreidberg/batman +WORKDIR /pandexo/batman +RUN pip install . +WORKDIR /pandexo + +# Install pandexo +RUN pip install numpy==1.26.4 +RUN pip install scipy==1.14.1 +# ***** NOTE ***** +# In the future, it would be useful to be able to either directly install this +# with pip (i.e. pip install pandexo==VERSION), or do a git checkout of a given +# version (i.e. 
git checkout VERSION) +RUN git clone --recursive https://github.com/natashabatalha/PandExo.git +WORKDIR /pandexo/PandExo +RUN pip install . +RUN pip install pandeia.engine==${pandeia_version} + +WORKDIR /pandexo +CMD ["start_pandexo", "--workers=10"] diff --git a/docker/redis.yaml b/docker/redis.yaml new file mode 100644 index 000000000..eddb2fa32 --- /dev/null +++ b/docker/redis.yaml @@ -0,0 +1,4 @@ +services: + redis: + image: "redis:alpine" + restart: unless-stopped diff --git a/exoctk/__init__.py b/exoctk/__init__.py index 609265e93..67df66156 100644 --- a/exoctk/__init__.py +++ b/exoctk/__init__.py @@ -15,3 +15,4 @@ from . import limb_darkening from . import phase_constraint_overlap from . import log_exoctk +from . import _version diff --git a/exoctk/_version.py b/exoctk/_version.py new file mode 100644 index 000000000..b9131647d --- /dev/null +++ b/exoctk/_version.py @@ -0,0 +1,2 @@ +# Explicit version number +__version__ = '1.2.6.4' \ No newline at end of file diff --git a/exoctk/contam_visibility/field_simulator.py b/exoctk/contam_visibility/field_simulator.py index f987c1c25..51574a52d 100755 --- a/exoctk/contam_visibility/field_simulator.py +++ b/exoctk/contam_visibility/field_simulator.py @@ -7,11 +7,15 @@ from copy import copy from functools import partial import glob -from multiprocessing import pool, cpu_count +from multiprocessing import pool, cpu_count, set_start_method import os import re +import json import time from pkg_resources import resource_filename +import logging +import datetime +from urllib.parse import quote_plus import astropy.coordinates as crd from astropy.io import fits @@ -26,6 +30,7 @@ from astroquery.xmatch import XMatch from astroquery.gaia import Gaia from bokeh.plotting import figure, show +from bokeh.embed import json_item from bokeh.layouts import gridplot, column from bokeh.models import Range1d, LinearColorMapper, LogColorMapper, Label, ColorBar, ColumnDataSource, HoverTool, Slider, CustomJS, VArea, CrosshairTool, TapTool, 
OpenURL, Span, Legend from bokeh.palettes import PuBu, Spectral6 @@ -34,36 +39,256 @@ import numpy as np import pysiaf import regions +import h5py -from ..utils import get_env_variables, check_for_data +from ..utils import get_env_variables, check_for_data, add_array_at_position, replace_NaNs from .new_vis_plot import build_visibility_plot, get_exoplanet_positions -from .contamination_figure import contam +from . import contamination_figure as cf +from exoctk import utils + +try: + set_start_method('spawn') +except RuntimeError: + pass + +import warnings +warnings.filterwarnings("ignore", message="Mean of empty slice") + +log_file = 'contam_tool.log' +logging.basicConfig( + filename=log_file, + filemode='w', # <-- THIS forces overwrite on every run + level=logging.INFO, + format='%(asctime)s %(message)s', + force=True +) + +_last_time = datetime.datetime.now() +def log_checkpoint(message): + global _last_time + now = datetime.datetime.now() + elapsed = (now - _last_time).total_seconds() + logging.info(f'{message} (Elapsed: {elapsed:.2f} seconds)') + _last_time = now + +def parse_log(): + timestamps = [] + messages = [] + with open(log_file, 'r') as f: + for line in f: + parts = line.strip().split(' ', 1) + if len(parts) == 2: + timestamps.append(parts[0]) + messages.append(parts[1]) + + for ts, msg in zip(timestamps, messages): + print(f'{ts}: {msg}') Vizier.columns = ["**", "+_r"] Gaia.MAIN_GAIA_TABLE = "gaiaedr3.gaia_source" # DR2 is default catalog Gaia.ROW_LIMIT = 100 -from exoctk import utils - -APERTURES = {'NIS_SOSSFULL': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[0, 0, 2048, 2048], 'trim': [127, 126, 252, 1]}, - 'NIS_SUBSTRIP96': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[1792, 1792, 1888, 1888], 'trim': [47, 46, 0, 1]}, - 'NIS_SUBSTRIP256': {'inst': 'NIRISS', 'full': 
'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[1792, 1792, 2048, 2048], 'trim': [127, 126, 0, 1]}, +APERTURES = {'NIS_SOSSFULL': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], + 'c0x0': 905, 'c0y0': 1467, 'c1x0': -0.013, 'c1y0': -0.1, 'c1y1': 0.12, 'c1x1': -0.03, 'c2y1': -0.011, + 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[0, 0, 2048, 2048], 'trim': [127, 126, 252, 1], + 'lft': 700, 'rgt': 3022, 'top': 2050, 'bot': 1400, 'blue_ext': -150, 'red_ext': 200, + 'xord0to1': -2886, 'yord0to1': 68, 'empirical_scale': [1, 1.5, 1.5, 1.5], + 'cutoffs': [2048, 1820, 1130], 'trace_names': ['Order 1', 'Order 2', 'Order 3'], + 'coeffs': [[1.68975801e-11, -4.60822060e-08, 4.94623886e-05, -5.93935390e-02, 8.67263818e+01], + [3.95721278e-11, -7.40683643e-08, 6.88340922e-05, -3.68009540e-02, 1.06704335e+02], + [1.06699517e-11, 3.36931077e-08, 1.45570667e-05, 1.69277607e-02, 1.45254339e+02]]}, + 'NIS_SUBSTRIP96': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], + 'c0x0': 905, 'c0y0': 1467, 'c1x0': -0.013, 'c1y0': -0.1, 'c1y1': 0.12, 'c1x1': -0.03, 'c2y1': -0.011, + 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[1792, 1792, 1888, 1888], 'trim': [47, 46, 0, 1], + 'lft': 700, 'rgt': 3022, 'top': 2050, 'bot': 1400, 'blue_ext': -150, 'red_ext': 200, + 'xord0to1': -2886, 'yord0to1': 68, 'empirical_scale': [1, 1.5, 1.5, 1.5], + 'cutoffs': [2048, 1820, 1130], 'trace_names': ['Order 1', 'Order 2', 'Order 3'], + 'coeffs': [[1.68975801e-11, -4.60822060e-08, 4.94623886e-05, -5.93935390e-02, 8.67263818e+01], + [3.95721278e-11, -7.40683643e-08, 6.88340922e-05, -3.68009540e-02, 1.06704335e+02], + [1.06699517e-11, 3.36931077e-08, 1.45570667e-05, 1.69277607e-02, 1.45254339e+02]]}, + 'NIS_SUBSTRIP256': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], + 'c0x0': 905, 'c0y0': 1467, 'c1x0': -0.013, 'c1y0': -0.1, 'c1y1': 0.12, 
'c1x1': -0.03, 'c2y1': -0.011, + 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[1792, 1792, 2048, 2048], 'trim': [127, 126, 0, 1], + 'lft': 700, 'rgt': 3022, 'top': 2050, 'bot': 1400, 'blue_ext': -150, 'red_ext': 200, + 'xord0to1': -2886, 'yord0to1': 68, 'empirical_scale': [1, 1.5, 1.5, 1.5], + 'cutoffs': [2048, 1820, 1130], 'trace_names': ['Order 1', 'Order 2', 'Order 3'], + 'coeffs': [[1.68975801e-11, -4.60822060e-08, 4.94623886e-05, -5.93935390e-02, 8.67263818e+01], + [3.95721278e-11, -7.40683643e-08, 6.88340922e-05, -3.68009540e-02, 1.06704335e+02], + [1.06699517e-11, 3.36931077e-08, 1.45570667e-05, 1.69277607e-02, 1.45254339e+02]]}, + 'NRCA5_40STRIPE1_DHS_F322W2': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.031, 'rad': 2.5, 'lam': [0.8, 2.8], + 'subarr_x': [0, 4257, 4257, 0], 'subarr_y': [1064, 1064, 3192, 3192], 'trim': [0, 1, 0, 1], + 'c0x0': 1800, 'c0y0': 2116, 'c1x0': 0, 'c1y0': 0, 'c1y1': 0.12, 'c1x1': -0.03, 'c2y1': -0.011, + 'lft': 0, 'rgt': 4300, 'top': 4000, 'bot': 0, 'blue_ext': 0, 'red_ext': 0, + 'xord0to1': -2300, 'yord0to1': -2116, 'empirical_scale': [1.] 
* 11, + 'cutoffs': [3324]*10, 'trace_names': ['DHS5', 'DHS4', 'DHS3', 'DHS2', 'DHS1', 'DHS6', 'DHS7', 'DHS8', 'DHS9', 'DHS10'], + 'coeffs': [[5.31914894e-03, 2.65331915e+03], [5.31914894e-03, 2.54031915e+03], + [5.31914894e-03, 2.42031915e+03], [5.31914894e-03, 2.28931915e+03], + [5.31914894e-03, 2.15831915e+03], [5.31914894e-03, 2.03531915e+03], + [3.54609929e-03, 1.92521277e+03], [3.54609929e-03, 1.81121277e+03], + [4.43262411e-03, 1.68126596e+03], [5.31914894e-03, 1.54531915e+03]]}, + 'NRCA5_40STRIPE1_DHS_F444W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.031, 'rad': 2.5, 'lam': [0.8, 2.8], + 'subarr_x': [0, 4257, 4257, 0], 'subarr_y':[1064, 1064, 3192, 3192], 'trim': [0, 1, 0, 1], + 'c0x0': 900, 'c0y0': 2116, 'c1x0': 0, 'c1y0': 0, 'c1y1': 0.12, 'c1x1': -0.03, 'c2y1': -0.011, + 'lft': 0, 'rgt': 4300, 'top': 4000, 'bot': 0, 'blue_ext': 0, 'red_ext': 0, + 'xord0to1': -2000, 'yord0to1': -2116, 'empirical_scale': [1.] * 11, + 'cutoffs': [3324]*10, 'trace_names': ['DHS5', 'DHS4', 'DHS3', 'DHS2', 'DHS1', 'DHS6', 'DHS7', 'DHS8', 'DHS9', 'DHS10'], + 'coeffs': [[5.31914894e-03, 2.65331915e+03], [5.31914894e-03, 2.54031915e+03], + [5.31914894e-03, 2.42031915e+03], [5.31914894e-03, 2.28931915e+03], + [5.31914894e-03, 2.15831915e+03], [5.31914894e-03, 2.03531915e+03], + [3.54609929e-03, 1.92521277e+03], [3.54609929e-03, 1.81121277e+03], + [4.43262411e-03, 1.68126596e+03], [5.31914894e-03, 1.54531915e+03]]}, 'NRCA5_GRISM256_F277W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [2.395, 3.179], 'trim': [0, 1, 0, 1]}, 'NRCA5_GRISM256_F322W2': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [2.413, 4.083], 'trim': [0, 1, 0, 1]}, 'NRCA5_GRISM256_F356W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [3.100, 4.041], 'trim': [0, 1, 0, 1]}, 'NRCA5_GRISM256_F444W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [3.835, 5.084], 'trim': [0, 1, 1250, 1]}, 
'MIRIM_SLITLESSPRISM': {'inst': 'MIRI', 'full': 'MIRIM_FULL', 'scale': 0.11, 'rad': 2.0, 'lam': [5, 12], 'trim': [6, 5, 0, 1]}} +DHS_STRIPES = {'NRCA5_40STRIPE1_DHS_F322W2': {'DHS5': {'x0': 2196, 'x1': 3324, 'y0': 2665, 'y1': 2671}, + 'DHS4': {'x0': 2196, 'x1': 3324, 'y0': 2552, 'y1': 2558}, + 'DHS3': {'x0': 2196, 'x1': 3324, 'y0': 2432, 'y1': 2438}, + 'DHS2': {'x0': 2196, 'x1': 3324, 'y0': 2301, 'y1': 2307}, + 'DHS1': {'x0': 2196, 'x1': 3324, 'y0': 2170, 'y1': 2176}, + 'DHS6': {'x0': 2196, 'x1': 3324, 'y0': 2047, 'y1': 2053}, + 'DHS7': {'x0': 2196, 'x1': 3324, 'y0': 1933, 'y1': 1937}, + 'DHS8': {'x0': 2196, 'x1': 3324, 'y0': 1819, 'y1': 1823}, + 'DHS9': {'x0': 2196, 'x1': 3324, 'y0': 1691, 'y1': 1696}, + 'DHS10': {'x0': 2196, 'x1': 3324, 'y0': 1557, 'y1': 1563}}, + 'NRCA5_40STRIPE1_DHS_F444W': {'DHS5': {'x0': 2196, 'x1': 3324, 'y0': 2665, 'y1': 2671}, + 'DHS4': {'x0': 2196, 'x1': 3324, 'y0': 2552, 'y1': 2558}, + 'DHS3': {'x0': 2196, 'x1': 3324, 'y0': 2432, 'y1': 2438}, + 'DHS2': {'x0': 2196, 'x1': 3324, 'y0': 2301, 'y1': 2307}, + 'DHS1': {'x0': 2196, 'x1': 3324, 'y0': 2170, 'y1': 2176}, + 'DHS6': {'x0': 2196, 'x1': 3324, 'y0': 2047, 'y1': 2053}, + 'DHS7': {'x0': 2196, 'x1': 3324, 'y0': 1933, 'y1': 1937}, + 'DHS8': {'x0': 2196, 'x1': 3324, 'y0': 1819, 'y1': 1823}, + 'DHS9': {'x0': 2196, 'x1': 3324, 'y0': 1691, 'y1': 1696}, + 'DHS10': {'x0': 2196, 'x1': 3324, 'y0': 1557, 'y1': 1563}}, + } + # Gaia color-Teff relation GAIA_TEFFS = np.asarray(np.genfromtxt(resource_filename('exoctk', 'data/contam_visibility/predicted_gaia_colour.txt'), unpack=True)) -# SOSS order 1/2/3 trace coefficients derived from commissioning data -SOSS_TRACE_COEFFS = [[1.68975801e-11, -4.60822060e-08, 4.94623886e-05, -5.93935390e-02, 8.67263818e+01], - [3.95721278e-11, -7.40683643e-08, 6.88340922e-05, -3.68009540e-02, 1.06704335e+02], - [1.06699517e-11, 3.36931077e-08, 1.45570667e-05, 1.69277607e-02, 1.45254339e+02]] +def load_exoplanet(filename, exoplanet_name): + """ + Load exoplanet data from 
HDF5 and reconstruct full contamination cube. + + Returns: + dict with keys: + - target_trace: (256, 2048) + - contamination: full (360, 256, 2048) array with zeros in skipped planes + - plane_index: indices of stored planes + """ + grp_name = exoplanet_name.strip().replace("/", "_") + + with h5py.File(filename, "r") as f: + if grp_name not in f: + raise KeyError(f"Exoplanet '{exoplanet_name}' not found in {filename}") + + grp = f[grp_name] + + # Load target trace + target_trace = grp["target_trace"][:, :] + + # Initialize full contamination cube + full_contam = np.zeros((360, 256, 2048), dtype=np.float32) + + # Load stored planes + plane_index = grp["plane_index"][:] + stored_planes = grp["contamination"][:, :, :] + + # Put stored planes in their original locations + full_contam[plane_index, :, :] = stored_planes + + data = {"target_trace": target_trace, "contamination": full_contam, "plane_index": plane_index} + + return data + + +def NIRCam_DHS_trace_mask(aperture, gap_value=0, ref_value=0, substripe_value=1, det_value=0, combined=False, plot=False): + """ + Construct a trace mask for NIRCam DHS mode + + Parameters + ---------- + aperture: str + The aperture to use + gap_value: float + The value in the detector gap + ref_value: float + The value of the reference pixels + substripe_value: float + The value in the mask area + combined: bool + Return a single flattened image + plot: bool + Plot the final array + + Returns + ------- + list, array + The final array or list of arrays for each trace + """ + # The DHS uses four detectors + starting = 0 # pixel + ref_pixels = 8 # pixel + det_size_full = 2048 # pixel + det_size = det_size_full - ref_pixels # pixel + + # DHS has 2 field points configuration and each field points can be paired to 6 filter position + # Depending on the field point/filter combination, the spectra will not fall on the same part of the 4 detectors nor have the same length + stripes = DHS_STRIPES[aperture] + pixel_scale = 0.031 # arcsec/pixel 
(on sky) + gap_fov = 5 # arcsec + gap = int(gap_fov / pixel_scale) # pixel + + # full = np.ones((det_size_full + det_size_full + gap, det_size_full + det_size_full + gap)) + full = np.ones((det_size_full + det_size_full + gap, det_size_full + det_size_full + gap)) * det_value + + # now let's populate the full array with the corresponding gap, reference or substripe locations + full[det_size_full:det_size_full + gap, :] = gap_value + full[:, det_size_full:det_size_full + gap] = gap_value + + # Add reference pixels + full[starting:det_size_full, starting:ref_pixels] = ref_value + full[det_size_full + gap:, starting:ref_pixels] = ref_value + full[starting:det_size_full, det_size:det_size_full] = ref_value + full[det_size_full + gap:, det_size:det_size_full] = ref_value + full[starting:det_size_full, det_size_full + gap:det_size_full + gap + ref_pixels] = ref_value + full[det_size_full + gap:, det_size_full + gap:det_size_full + gap + ref_pixels] = ref_value + full[starting:det_size_full, det_size_full + gap + det_size:] = ref_value + full[det_size_full + gap:, det_size_full + gap + det_size:] = ref_value + + full[starting:ref_pixels, starting:det_size_full] = ref_value + full[starting:ref_pixels, det_size_full + gap:] = ref_value + full[det_size:det_size_full, starting:det_size_full] = ref_value + full[det_size:det_size_full, det_size_full + gap:] = ref_value + full[det_size_full + gap:det_size_full + gap + ref_pixels, starting:det_size_full] = ref_value + full[det_size_full + gap:det_size_full + gap + ref_pixels, det_size_full + gap:] = ref_value + full[det_size_full + gap + det_size:, starting:det_size_full] = ref_value + full[det_size_full + gap + det_size:, det_size_full + gap:] = ref_value + + # For the specific field point and filter specified above: + traces = [] + for stripe in stripes.values(): + if combined: + full[stripe['y0']:stripe['y1'], stripe['x0']:stripe['x1']] += substripe_value + else: + trace = copy(full) + trace[stripe['y0']:stripe['y1'], 
stripe['x0']:stripe['x1']] = substripe_value + traces.append(trace) + + if plot: + plt = figure(width=1000, height=1000) + plt.image([full], x=0, y=0, dw=full.shape[0], dh=full.shape[1]) + for stripe in stripes.values(): + plt.line([stripe['x0'], stripe['x1']], [(stripe['y0']+stripe['y1'])/2.]*2) + show(plt) + + return full[1064:3192, :] if combined else [trace[1064:3192] for trace in traces] -def SOSS_trace_mask(aperture, radius=20): + +def NIRISS_SOSS_trace_mask(aperture, radius=20): """ Construct a trace mask for SOSS data @@ -77,7 +302,7 @@ def SOSS_trace_mask(aperture, radius=20): np.ndarray The SOSS trace mask """ - traces = np.array([np.polyval(coeff, np.arange(2048)) for coeff in SOSS_TRACE_COEFFS]) + traces = np.array([np.polyval(coeff, np.arange(2048)) for coeff in APERTURES[aperture]['coeffs']]) ydim = APERTURES[aperture]['subarr_y'][2] - APERTURES[aperture]['subarr_y'][1] mask1 = np.zeros((ydim, 2048)) mask2 = np.zeros((ydim, 2048)) @@ -106,34 +331,6 @@ def SOSS_trace_mask(aperture, radius=20): return mask1, mask2, mask3 -def trace_dict(teffs=None): - """ - Load the trace data for all the given Teff values into a dictionary - - Parameters - ---------- - teffs: sequence - The teff values to fetch - - Returns - ------- - dict - The trace data for the given Teff values - """ - teff_dict = {} - - if teffs is None: - teffs = np.arange(2000, 12100, 100) - - # Make sure they're ints - teffs = [int(teff) for teff in teffs] - - for teff in teffs: - teff_dict[teff] = get_trace('NIS_SUBSTRIP256', teff, 'STAR', verbose=False) - - return teff_dict - - def find_sources(ra, dec, width=7.5*u.arcmin, catalog='Gaia', target_date=Time.now(), verbose=False, pm_corr=True): """ Find all the stars in the vicinity and estimate temperatures @@ -185,7 +382,7 @@ def find_sources(ra, dec, width=7.5*u.arcmin, catalog='Gaia', target_date=Time.n pass # Or infer galaxy from parallax - stars['type'] = ['STAR' if row['parallax'] > 0.5 else 'GALAXY' for row in stars] + stars['type'] = 
[classify_source(row) for row in stars] # Derived from K. Volk stars['Teff'] = [GAIA_TEFFS[0][(np.abs(GAIA_TEFFS[1] - row['bp_rp'])).argmin()] for row in stars] @@ -229,8 +426,7 @@ def find_sources(ra, dec, width=7.5*u.arcmin, catalog='Gaia', target_date=Time.n # Add URL (before PM correcting coordinates) search_radius = 1 - urls = ['https://vizier.u-strasbg.fr/viz-bin/VizieR-5?-ref=VIZ62fa613b20f3fc&-out.add=.&-source={}&-c={}%20%2b{},eq=ICRS,rs={}&-out.orig=o'.format(cat, ra_deg, dec_deg, search_radius) for ra_deg, dec_deg in zip(stars['ra'], stars['dec'])] - # urls = ['https://vizier.cds.unistra.fr/viz-bin/VizieR-S?Gaia+EDR3+{}'.format(source_id) for source_id in stars['source_id']] + urls = ['https://vizier.u-strasbg.fr/viz-bin/VizieR-5?-ref=VIZ62fa613b20f3fc&-out.add=.&-source={}&-c={}&eq=ICRS&rs={}&-out.orig=o'.format(cat, quote_plus(f"{ra_deg} {dec_deg}"), search_radius) for ra_deg, dec_deg in zip(stars['ra'], stars['dec'])] stars.add_column(urls, name='url') # Cope coordinates to new column @@ -256,14 +452,6 @@ def find_sources(ra, dec, width=7.5*u.arcmin, catalog='Gaia', target_date=Time.n # Add detector location to the table stars.add_columns(np.zeros((10, len(stars))), names=['xtel', 'ytel', 'xdet', 'ydet', 'xsci', 'ysci', 'xord0', 'yord0', 'xord1', 'yord1']) - # Get traces - traces = trace_dict(teffs=np.unique(stars['Teff'])) - stars.add_column([np.zeros((256, 2048))] * len(stars), name='trace_o1') - stars.add_column([np.zeros((256, 2048))] * len(stars), name='trace_o2') - stars.add_column([np.zeros((256, 2048))] * len(stars), name='trace_o3') - for star in stars: - star['trace_o1'], star['trace_o2'], star['trace_o3'] = traces[int(star['Teff'])] - return stars @@ -311,10 +499,10 @@ def calculate_current_coordinates(ra, dec, pm_ra, pm_dec, epoch, target_date=Tim new_ra = ra + (pm_RA_deg_per_year * dt_years / np.cos(np.deg2rad(dec))) new_dec = dec + (pm_Dec_deg_per_year * dt_years) - # if verbose: - # print(f"delta_T = {dt_years} years") - # print(f'RA: 
{ra} + {pm_ra} mas/yr => {new_ra}') - # print(f'Dec: {dec} + {pm_dec} mas/yr => {new_dec}') + if verbose: + print(f"delta_T = {dt_years} years") + print(f'RA: {ra} + {pm_ra} mas/yr => {new_ra}') + print(f'Dec: {dec} + {pm_dec} mas/yr => {new_dec}') return new_ra, new_dec @@ -365,19 +553,54 @@ def add_source(startable, name, ra, dec, teff=None, fluxscale=None, delta_mag=No ra = newcoord.ra.degree dec = newcoord.dec.degree - # Get the trace - trace = get_trace('NIS_SUBSTRIP256', teff or 2300, type, verbose=False) - # Add the row to the table - startable.add_row({'name': name, 'designation': name, 'ra': ra, 'dec': dec, 'obs_ra': ra, 'obs_dec': dec, 'Teff': teff, 'fluxscale': fluxscale, 'type': type, 'distance': dist, 'trace_o1': trace[0], 'trace_o2': trace[1], 'trace_o3': trace[2]}) + startable.add_row({'name': name, 'designation': name, 'ra': ra, 'dec': dec, 'obs_ra': ra, 'obs_dec': dec, 'Teff': teff, 'fluxscale': fluxscale, 'type': type, 'distance': dist}) startable.sort('distance') return startable -def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0=905, c0y0=1467, - c1x0=-0.013, c1y0=-0.1, c1y1=0.12, c1x1=-0.03, c2y1=-0.011, tilt=0, ord0scale=1, - ord1scale=1, plot=False, verbose=False): +def classify_source(row, verbose=False): + """ + Classify a Gaia EDR3 source as STAR or GALAXY based on proxy criteria. + + Parameters + ---------- + row : astropy.table.Row + A single row from an Astropy Table with Gaia EDR3 columns. + verbose: bool + Print helpful stuff + + Returns + ------- + str + 'STAR' if the source appears to be stellar, 'GALAXY' otherwise. 
+ """ + # Default is STAR + stype = 'STAR' + + # Check to see if GALAXY + if hasattr(row['parallax'], 'mask'): + + if hasattr(row['astrometric_excess_noise'], 'mask'): + if row['astrometric_excess_noise'] > 1.0: + stype = 'GALAXY' + + if hasattr(row['phot_bp_rp_excess_factor'], 'mask') and hasattr(row['bp_rp'], 'mask'): + if row['phot_bp_rp_excess_factor'] > (1.0 + 0.015 * row['bp_rp']**2): + stype = 'GALAXY' + + else: + if row['parallax'] < 0.5: + stype = 'GALAXY' + + if verbose: + print(stype, row['parallax'], row['astrometric_excess_noise'], row['phot_bp_rp_excess_factor']) + + return stype + + +def calc_v3pa(V3PA, stars, aperture, data=None, tilt=0, plot=False, verbose=False, logging=True, POM=False): """ Calculate the V3 position angle for each target at the given PA @@ -391,40 +614,23 @@ def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 The aperture object for the given mode data: sequence (optional) The data to use instead of making a simulation (to check accuracy or ID sources) - x_sweet: int - The x-axis location of the target on the detector - y_sweet: int - The y-axis location of the target on the detector - c0x0: float - The zeroth coefficient for the x translation of order 0 traces - c0y0: float - The zeroth coefficient for the y translation of order 0 traces - c1x0: float - The first coefficient for the x translation of order 0 traces - c1y0: float - The first coefficient for the y translation of order 0 traces - c1x1: float - The first coefficient for the x translation of order 1/2/3 traces - c1y1: float - The first coefficient for the y translation of order 1/2/3 traces - c2y1: float - The second coefficient for the y translation of order 1/2/3 traces - tilt: float - The tilt of the frame relative to nominal position (stand-in for PWCPOS) - ord0scale: float - A factor to scale the order 0 traces by (for testing) - ord1scale: float - A factor to scale the order 1/2/3 traces by (for testing) plot: bool Plot the full 
frame and subarray bounds with all traces verbose: bool Print statements + POM: bool + Show the pick off mirror footprint in the plot Returns ------- targframe, starframe The frame containing the target trace and a frame containing all contaminating star traces """ + if logging: + open('contam_tool.log', 'w').close() + start_time = datetime.datetime.now() + log_checkpoint(f'Logging in {log_file}...') + if verbose: print("Checking PA={} with {} stars in the vicinity".format(V3PA, len(stars['ra']))) @@ -450,6 +656,9 @@ def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 aperture.minrow, aperture.maxrow = rows.min(), rows.max() aperture.mincol, aperture.maxcol = cols.min(), cols.max() + if logging: + log_checkpoint('Loaded aperture info from pySIAF.') + # Get APA from V3PA APA = V3PA + aperture.V3IdlYAngle if APA > 360: @@ -467,12 +676,14 @@ def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 stars['xsci'][0], stars['ysci'][0] = aperture.det_to_sci(stars['xdet'][0], stars['ydet'][0]) # Order 0 location of target relative to pysiaf SCI coordinates - stars['xord0'][0] = int(stars['xsci'][0] + c0x0) - stars['yord0'][0] = int(stars['ysci'][0] + c0y0) - stars['xord1'][0] = stars['xord0'][0] - x_sweet + aper['subarr_x'][0] - stars['yord1'][0] = stars['yord0'][0] - y_sweet + aper['subarr_y'][1] + stars['xord0'][0] = int(stars['xsci'][0] + aper['c0x0']) + stars['yord0'][0] = int(stars['ysci'][0] + aper['c0y0']) + + # Order 1 location of target relative to order 0 + stars['xord1'][0] = stars['xord0'][0] + aper['xord0to1'] + stars['yord1'][0] = stars['yord0'][0] + aper['yord0to1'] - # Get target's attitude matrix for each Position Angle + # Get target's attitude matrix for each` Position Angle attitude = pysiaf.utils.rotations.attitude_matrix(stars['xtel'][0], stars['ytel'][0], stars['ra'][0], stars['dec'][0], APA) # Get relative coordinates of the stars based on target attitude @@ -491,220 +702,143 @@ def 
calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 # Get the DET coordinates of the star star['xsci'], star['ysci'] = aperture.det_to_sci(star['xdet'], star['ydet']) - # Order 0 location relative to pysiaf SCI coordinates - star['xord0'] = int(star['xsci'] + c0x0 + c1x0 * (stars['xsci'][0] - star['xsci'])) - star['yord0'] = int(star['ysci'] + c0y0 + c1y0 * (stars['ysci'][0] - star['ysci'])) + # Order 0 location relative to pysiaf SCI coordinates (with distortion corrections) + star['xord0'] = int(star['xsci'] + aper['c0x0'] + aper['c1x0'] * (stars['xsci'][0] - star['xsci'])) + star['yord0'] = int(star['ysci'] + aper['c0y0'] + aper['c1y0'] * (stars['ysci'][0] - star['ysci'])) - # Order 1/2/3 location relative to order 0 location - # x_shift = int(c1x0 + c1x1 * (stars[0]['xord0'] - star['xord0'])) - # y_shift = int(c1y0 + c1y1 * (stars[0]['yord0'] - star['yord0']) - c1x1 * (stars[0]['xord0'] - star['xord0'])) - # star['xord1'] = star['xord0'] - x_sweet + aper['subarr_x'][0] + x_shift - # star['yord1'] = star['yord0'] - y_sweet + aper['subarr_y'][1] + y_shift - x_shift = int(c1x1 * (stars[0]['xord0'] - star['xord0'])) - y_shift = int(c1y1 * (stars[0]['yord0'] - star['yord0'])) + int(c2y1 * (stars[0]['xord0'] - star['xord0'])) - star['xord1'] = star['xord0'] - x_sweet + aper['subarr_x'][0] + x_shift - star['yord1'] = star['yord0'] - y_sweet + aper['subarr_y'][1] + y_shift + # Order 1/2/3 location relative to order 0 location (with distortion corrections) + x_shift = int(aper['c1x1'] * (stars[0]['xord0'] - star['xord0'])) + y_shift = int(aper['c1y1'] * (stars[0]['yord0'] - star['yord0'])) + int(aper['c2y1'] * (stars[0]['xord0'] - star['xord0'])) + star['xord1'] = star['xord0'] + aper['xord0to1'] + x_shift + star['yord1'] = star['yord0'] + aper['yord0to1'] + y_shift + + if logging: + log_checkpoint(f'Calculated target and {len(stars)-1} source sci coordinates.') # Just sources in FOV (Should always have at least 1, the target) - lft, rgt, top, 
bot = 700, 5100, 2050, 1400 + lft, rgt, top, bot = aper['lft'], aper['rgt'], aper['top'], aper['bot'] FOVstars = stars[(lft < stars['xord0']) & (stars['xord0'] < rgt) & (bot < stars['yord0']) & (stars['yord0'] < top)] + # Get the traces for sources in the FOV and add the column to the source table + star_traces = [get_trace(aperture.AperName, temp, typ, verbose=False) for temp, typ in zip(FOVstars['Teff'], FOVstars['type'])] + FOVstars['traces'] = star_traces + # Remove Teff value for GALAXY type FOVstars['Teff'] = [np.nan if t == 'GALAXY' else i for i, t in zip(FOVstars['Teff'], FOVstars['type'])] FOVstars['Teff_str'] = ['---' if t == 'GALAXY' else str(int(i)) for i, t in zip(FOVstars['Teff'], FOVstars['type'])] + if logging: + log_checkpoint(f'Found {len(FOVstars)} sources in the FOV.') + if verbose: print("Calculating contamination from {} other sources in the FOV".format(len(FOVstars) - 1)) # Make frame for the target and a frame for all the other stars - targframe_o1 = np.zeros((subY, subX)) - targframe_o2 = np.zeros((subY, subX)) - targframe_o3 = np.zeros((subY, subX)) + n_traces = len(star_traces[0]) + targframes = [np.zeros((subY, subX))] * n_traces starframe = np.zeros((subY, subX)) - # Get order 0 - order0 = get_order0(aperture.AperName) * 1.5e8 # Scaling factor based on observations + # Get trace masks + trace_masks = NIRCam_DHS_trace_mask(aperture.AperName) if 'NRCA5' in aperture.AperName else NIRISS_SOSS_trace_mask(aperture.AperName) - # SOSS trace masks - mask1, mask2, mask3 = SOSS_trace_mask(aperture.AperName) + if logging: + log_checkpoint(f'Fetched arrays, masks, and traces...') # Iterate over all stars in the FOV and add their scaled traces to the correct frame for idx, star in enumerate(FOVstars): - # Scale the order 0 image and get dims - scale0 = copy(order0) * star['fluxscale'] * ord0scale - dim0y, dim0x = scale0.shape - dim0y0 = int(dim0y / 2) - dim0y1 = dim0y - dim0y0 - dim0x0 = int(dim0x / 2) - dim0x1 = dim0x - dim0x0 - - # Locations of the 
order 0 pixels on the subarray - f0x0, f1x0 = int(max(aper['subarr_x'][0], star['xord0'] - dim0x0)), int(min(aper['subarr_x'][1], star['xord0'] + dim0x1)) - f0y0, f1y0 = int(max(aper['subarr_y'][1], star['yord0'] - dim0y0)), int(min(aper['subarr_y'][2], star['yord0'] + dim0y1)) - - if 0 < f1x0 - f0x0 <= dim0x and 0 < f1y0 - f0y0 <= dim0y: - - # How many pixels of the order 0 image fall on the subarray - t0x0 = dim0x - (f1x0 - f0x0) if f0x0 == aper['subarr_x'][0] else 0 - t1x0 = f1x0 - f0x0 if f1x0 == aper['subarr_x'][1] else dim0x - t0y0 = dim0y - (f1y0 - f0y0) if f0y0 == aper['subarr_y'][0] else 0 - t1y0 = f1y0 - f0y0 if f1y0 == aper['subarr_y'][2] else dim0y - - # if verbose: - # print("{} x {} pixels of star {} order 0 fall on {}".format(t1y0 - t0y0, t1x0 - t0x0, idx, aperture.AperName)) - - # Target order 0 is never on the subarray so add all order 0s to the starframe - starframe[f0y0 - aper['subarr_y'][1]:f1y0 - aper['subarr_y'][1], f0x0 - aper['subarr_x'][1]:f1x0 - aper['subarr_x'][0]] += scale0[t0y0:t1y0, t0x0:t1x0] - - # Higher Orders ============================================================================ - - # Get the appropriate trace - trace_o1 = star['trace_o1'] - trace_o2 = star['trace_o2'] - trace_o3 = star['trace_o3'] - - # Orient trace if need be - if 'NIS' in aperture.AperName: - trace_o1 = np.rot90(trace_o1.T[:, ::-1] * 1.5, k=1) # Scaling factor based on observations - trace_o2 = np.rot90(trace_o2.T[:, ::-1] * 1.5, k=1) # Scaling factor based on observations - trace_o3 = np.rot90(trace_o3.T[:, ::-1] * 1.5, k=1) # Scaling factor based on observations - - # Pad or trim SUBSTRIP256 simulation for SUBSTRIP96 or FULL frame - if aperture.AperName == 'NIS_SOSSFULL': - trace_o1 = np.pad(trace_o1, ((1792, 0), (0, 0)), 'constant') - trace_o2 = np.pad(trace_o2, ((1792, 0), (0, 0)), 'constant') - trace_o3 = np.pad(trace_o3, ((1792, 0), (0, 0)), 'constant') - elif aperture.AperName == 'NIS_SUBSTRIP96': - trace_o1 = trace_o1[:96, :] - trace_o2 = 
trace_o2[:96, :] - trace_o3 = trace_o3[:96, :] - - # Get the trace and shift into the correct subarray position - # trace *= mask1 + mask2 + mask3 - - # Scale the order 1, 2, 3 image and get dims - trace_o1 *= star['fluxscale'] * ord1scale - if aperture.AperName.startswith('NIS'): - trace_o2 *= star['fluxscale'] * ord1scale - trace_o3 *= star['fluxscale'] * ord1scale - dimy, dimx = trace_o1.shape - - # Func to remove background - # def bkg(trc, cutoff=0.00001): - # trc[trc < 0] = cutoff - # trc[np.isnan(trc)] = cutoff - # trc[np.isinf(trc)] = cutoff - # return trc - - # Trim background - # trace_o1 = bkg(trace_o1) - # if aperture.AperName.startswith('NIS'): - # trace_o2 = bkg(trace_o2) - # trace_o3 = bkg(trace_o3) - - # Location of full trace footprint - fpx0 = int(star['xord1']) - fpx1 = int(fpx0 + dimx) - fpy0 = int(star['yord1']) - fpy1 = int(fpy0 + dimy) - - # Locations of the trace pixels on the subarray - f0x, f1x = max(aper['subarr_x'][0], fpx0), min(aper['subarr_x'][1], fpx1) - f0y, f1y = max(aper['subarr_y'][1], fpy0), min(aper['subarr_y'][2], fpy1) - - # print(idx, f0x, f1x, f0y, f1y) - if 0 < f1x - f0x <= dimx and 0 < f1y - f0y <= dimy: - - # How many pixels of the trace image fall on the subarray - t0x = dimx - (f1x - f0x) if f0x == aper['subarr_x'][0] else 0 - t1x = f1x - f0x if f1x == aper['subarr_x'][1] else dimx - t0y = dimy - (f1y - f0y) if f0y == aper['subarr_y'][0] else 0 - t1y = f1y - f0y if f1y == aper['subarr_y'][2] else dimy + # Scale the traces for this source + traces = [trace * star['fluxscale'] * aper['empirical_scale'][n + 1] for n, trace in enumerate(star['traces'])] - if verbose: - print("{} x {} pixels of star {} trace fall on {}".format(t1y - t0y, t1x - t0x, idx, aperture.AperName)) + # Add each target trace to it's own frame + if idx == 0: - # Add each order to it's own frame - if idx == 0: + for n, trace in enumerate(traces): - targframe_o1[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - 
aper['subarr_x'][0]] += trace_o1[t0y:t1y, t0x:t1x] - if aperture.AperName.startswith('NIS'): - targframe_o2[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o2[t0y:t1y, t0x:t1x] - targframe_o3[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o3[t0y:t1y, t0x:t1x] + # Assumes the lower lft corner of the trace is in the lower left corner of the 'targframe' array + targframes[n] = add_array_at_position(targframes[n], trace, 0, 0) - # Add all orders to the same frame (if it is a STAR) - else: + # Add all orders to the same frame (if it is a STAR) + else: - # NOTE: Take this conditional out if you want to see galaxy traces! - if star['type'] == 'STAR': + # Get correct order 0 + order0 = get_order0(aperture.AperName, star['Teff'], stype=star['type'], verbose=verbose) * 1.5e3 # Scaling factor based on observations - starframe[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o1[t0y:t1y, t0x:t1x] - if aperture.AperName.startswith('NIS'): - starframe[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o2[t0y:t1y, t0x:t1x] - starframe[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o3[t0y:t1y, t0x:t1x] + # Scale the order 0 image and add it to the starframe + scale0 = copy(order0) * star['fluxscale'] * aper['empirical_scale'][0] + starframe = add_array_at_position(starframe, scale0, int(star['xord0'] - aper['subarr_x'][0]), int(star['yord0'] - aper['subarr_y'][1]), centered=True) - # Contam per order - simframe_o1 = targframe_o1 + starframe - simframe_o2 = targframe_o2 + starframe - simframe_o3 = targframe_o3 + starframe - simframe = targframe_o1 + targframe_o2 + targframe_o3 + starframe + # NOTE: Take this conditional out if you want to see galaxy 
traces! + if star['type'] == 'STAR': + for trace in traces: + starframe = add_array_at_position(starframe, trace, int(star['xord1'] - stars['xord1'][0]), int(star['yord1'] - stars['yord1'][0])) - # Calculate contam/total counts in each detector column - pctframe_o1 = starframe / simframe_o1 - pctframe_o2 = starframe / simframe_o2 - pctframe_o3 = starframe / simframe_o3 - pctline_o1 = np.nanmean(pctframe_o1 * mask1, axis=0) - pctline_o2 = np.nanmean(pctframe_o2 * mask2, axis=0) - pctline_o3 = np.nanmean(pctframe_o3 * mask3, axis=0) + if logging: + log_checkpoint(f'Added {len(FOVstars)} sources to the simulated frames.') - result = {'pa': V3PA, 'target': targframe_o1 + targframe_o2 + targframe_o3, 'target_o1': targframe_o1, 'target_o2': targframe_o2, 'target_o3': targframe_o3, 'contaminants': starframe, 'sources': FOVstars, 'order1_contam': pctline_o1, 'order2_contam': pctline_o2, 'order3_contam': pctline_o3} + # Adding frames together + simframes = [tframe + starframe for tframe in targframes] + simframe = np.sum(targframes, axis=0) + starframe + pctframes = [np.divide(starframe, sframe, out=np.full_like(starframe, np.nan), where=(sframe != 0) & ~np.isnan(sframe)) for sframe in simframes] + pctlines = [] + for i, (pframe, mask) in enumerate(zip(pctframes, trace_masks)): + masked = pframe * mask + with np.errstate(invalid='ignore', divide='ignore'): + mean_line = np.nanmean(masked, axis=0) + pctlines.append(mean_line) + + # Make results dict + result = {'pa': V3PA, 'target': np.sum(targframes, axis=0), 'target_traces': targframes, + 'contaminants': starframe, 'sources': FOVstars, 'contam_levels': pctlines} + + if logging: + log_checkpoint('Compiled final results.') if plot: # Make the plot tools = ['pan', 'reset', 'box_zoom', 'wheel_zoom', 'save'] - tips = [('Name', '@name'), ('Type', '@type'), ('RA', '@ra'), ('DEC', '@dec'), ('order', '@order{int}'), ('scale', '@fluxscale'), ('Teff [K]', '@Teff_str'), ('distance [mas]', '@distance')] - fig = figure(title='Generated 
FOV from Gaia EDR3', width=900, height=max(subY, 120), match_aspect=True, tools=tools) + tips = [('Name', '@name'), ('Type', '@type'), ('RA', '@ra'), ('DEC', '@dec'), ('trace', '@trace'), + ('scale', '@fluxscale'), ('Teff [K]', '@Teff_str'), ('distance [mas]', '@distance')] + fig = figure(title='Generated FOV from Gaia EDR3', width=900, height=450 if inst['inst'] == 'NIRCam' else max(subY, 120), match_aspect=True, tools=tools) fig.title = '({}, {}) at PA={} in {}'.format(stars[0]['ra'], stars[0]['dec'], V3PA, aperture.AperName) # Plot config scale = 'log' color_map = 'Viridis256' - # Plot the obs data if possible + # Plot the obs data if possible... if data is not None: - imgsource = ColumnDataSource(data={'data': [data]}) - vmax = np.nanmax(data) - if scale == 'log': - mapper = LogColorMapper(palette=color_map, low=1, high=vmax) - else: - mapper = LinearColorMapper(palette=color_map, low=0, high=vmax) - data[data < 0] = 0 - data = rotate(data, tilt) - fig.image(image='data', x=0, y=2048 - data.shape[0], dh=data.shape[0], dw=2048, color_mapper=mapper, source=imgsource) - - # Plot the simulated frame - vmax = np.nanmax(simframe) - if scale == 'log': - mapper = LogColorMapper(palette=color_map, low=1, high=vmax) - else: - mapper = LinearColorMapper(palette=color_map, low=0, high=vmax) + simframe = data - # Only plot the simulation if no data is available to plot - if data is None: - imgsource = ColumnDataSource(data={'sim': [simframe]}) - fig.image(image='sim', x=aper['subarr_x'][0], dw=subX, y=aper['subarr_y'][1], dh=subY, color_mapper=mapper, source=imgsource, name="image") + # Replace negatives + simframe[simframe < 0] = 0 - mapper = linear_cmap(field_name='Teff', palette=Spectral6, low=np.nanmin(FOVstars['Teff']), high=np.nanmax(FOVstars['Teff'])) + # Rotate for PWCPOS + simframe = rotate(simframe, tilt) + + # Plot the image data or simulation + vmax = np.nanmax(simframe) + mapper = LogColorMapper(palette=color_map, low=1, high=vmax) if scale == 'log' else 
LinearColorMapper(palette=color_map, low=0, high=vmax) + imgsource = ColumnDataSource(data={'sim': [simframe]}) + fig.image(image='sim', x=aper['subarr_x'][0], dw=subX, y=aper['subarr_y'][1], dh=subY, source=imgsource, name="image", color_mapper=mapper) + + # Plot the detector gaps and reference pixels for visual inspection + refframe = NIRCam_DHS_trace_mask(aperture.AperName, substripe_value=0, ref_value=1, gap_value=1, combined=True) if 'NRCA5' in aperture.AperName else np.zeros((subY, subX)) + refsource = ColumnDataSource(data={'ref': [refframe]}) + fig.image(image='ref', x=aper['subarr_x'][0], dw=subX, y=aper['subarr_y'][1], dh=subY, source=refsource, name="ref", color_mapper=LinearColorMapper(palette=["white", "black"], low=0, high=1), alpha=0.1) # Plot order 0 locations of stars FOVstars_only = FOVstars[FOVstars['type'] == 'STAR'] source0_stars = ColumnDataSource(data={'Teff_str': FOVstars_only['Teff_str'], 'distance': FOVstars_only['distance'], 'xord0': FOVstars_only['xord0'], 'yord0': FOVstars_only['yord0'], 'ra': FOVstars_only['ra'], 'dec': FOVstars_only['dec'], 'name': FOVstars_only['name'], 'type': FOVstars_only['type'], 'url': FOVstars_only['url'], 'fluxscale': FOVstars_only['fluxscale'], - 'order': [0] * len(FOVstars_only)}) - order0_stars = fig.circle('xord0', 'yord0', color='red', size=20, line_width=3, fill_color=None, name='order0', source=source0_stars) + 'trace': ['Order 0'] * len(FOVstars_only)}) + order0_stars = fig.scatter('xord0', 'yord0', color='red', size=20, line_width=3, fill_color=None, name='order0', source=source0_stars) + + # Plot the POM footprint + if POM: + fig.varea(x=[lft, rgt], y1=[bot, bot], y2=[top, top], fill_color='blue', fill_alpha=0.1) # Plot order 0 locations of galaxies FOVstars_gal = FOVstars[FOVstars['type'] == 'GALAXY'] @@ -715,47 +849,42 @@ def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 'yord0': FOVstars_gal['yord0'], 'ra': FOVstars_gal['ra'], 'dec': FOVstars_gal['dec'], 'name': 
FOVstars_gal['name'], 'type': FOVstars_gal['type'], 'url': FOVstars_gal['url'], 'fluxscale': FOVstars_gal['fluxscale'], - 'order': [0] * len(FOVstars_gal)}) - order0_gal = fig.circle('xord0', 'yord0', color='pink', size=20, line_width=3, fill_color=None, name='order0', + 'trace': ['Order 0'] * len(FOVstars_gal)}) + order0_gal = fig.scatter('xord0', 'yord0', color='pink', size=20, line_width=3, fill_color=None, name='order0', source=source0_gal) - # Plot the sweet spot - fig.circle([stars[0]['xord0']], [stars[0]['yord0']], size=8, line_width=3, fill_color=None, line_color='black') - - # Trace extends in dispersion direction further than 2048 subarray edges - blue_ext = 150 - red_ext = 200 + # Plot the target order 0 + fig.scatter([stars[0]['xord0']], [stars[0]['yord0']], size=8, line_width=3, fill_color=None, line_color='black') - # Get the new x-ranges - xr0 = np.linspace(-blue_ext, 2048 + red_ext, 1000) - xr1 = np.linspace(-blue_ext, 1820 + red_ext, 1000) - xr2 = np.linspace(-blue_ext, 1130 + red_ext, 1000) - - # Add the y-intercept to the c0 coefficient - polys = SOSS_TRACE_COEFFS - yr0 = np.polyval(polys[0], xr0) - yr1 = np.polyval(polys[1], xr1) - yr2 = np.polyval(polys[2], xr2) + # Get the nominal x and y values for the trace centroids + n_pts = 1000 + x_ranges = [np.linspace(inst['blue_ext'], inst['cutoffs'][n] + inst['red_ext'], n_pts) for n in np.arange(n_traces)] + y_ranges = [np.polyval(inst['coeffs'][n], xr) for n, xr in enumerate(x_ranges)] lines = [] for idx, star in enumerate(FOVstars): - # Order 1/2/3 location relative to order 0 - for order in [1, 2, 3]: - source = ColumnDataSource(data={'x1': xr0 + star['xord1'], 'y1': yr0 + star['yord1'], - 'x2': xr1 + star['xord1'], 'y2': yr1 + star['yord1'], - 'x3': xr2 + star['xord1'], 'y3': yr2 + star['yord1'], - 'name': ['Target' if idx == 0 else star['designation']] * len(xr0), - 'type': [star['type']] * len(xr0), - 'ra': [star['ra']] * len(xr0), 'dec': [star['dec']] * len(xr0), - 'fluxscale': 
[star['fluxscale']] * len(xr0), - 'Teff_str': [star['Teff_str']] * len(xr0), - 'distance': [star['distance']] * len(xr0), - 'order': [order] * len(xr0), - 'url': [star['url']] * len(xr0) - }) - - line = fig.line('x{}'.format(order), 'y{}'.format(order), source=source, color='pink' if star['type'] == 'GALAXY' else 'red', name='traces', line_dash='solid' if idx == 0 else 'dashed', width=3 if idx == 0 else 1) + # Trace locations relative to order 0 + for trx in np.arange(n_traces): + + # Make the base dict for this star + data_dict = {'name': ['Target' if idx == 0 else star['designation']] * n_pts, + 'type': [star['type']] * n_pts, + 'ra': [star['ra']] * n_pts, + 'dec': [star['dec']] * n_pts, + 'fluxscale': [star['fluxscale']] * n_pts, + 'Teff_str': [star['Teff_str']] * n_pts, + 'distance': [star['distance']] * n_pts, + 'trace': [aper['trace_names'][trx]] * n_pts, + 'url': [star['url']] * n_pts} + + # Add the offset traces for this star + for n in np.arange(n_traces): + data_dict[f'x{n}'] = x_ranges[n] + star['xord1'] + data_dict[f'y{n}'] = y_ranges[n] + star['yord1'] + + source = ColumnDataSource(data=data_dict) + line = fig.line('x{}'.format(trx), 'y{}'.format(trx), source=copy(source), color='pink' if star['type'] == 'GALAXY' else 'red', name='traces', line_dash='solid' if idx == 0 else 'dashed', width=3 if idx == 0 else 1) lines.append(line) # Add order 0 hover and taptool @@ -769,25 +898,24 @@ def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 fig.add_tools(TapTool(behavior='select', name='traces', callback=OpenURL(url="@url"))) # Show the figure - fig.x_range = Range1d(aper['subarr_x'][0], aper['subarr_x'][1]) - fig.y_range = Range1d(aper['subarr_y'][1], aper['subarr_y'][2]) + pad = 20 + fig.x_range = Range1d(aper['subarr_x'][0] - pad, aper['subarr_x'][1] + pad) + fig.y_range = Range1d(aper['subarr_y'][1] - pad, aper['subarr_y'][2] + pad) # Source for ratio plot - rsource = ColumnDataSource(data=dict(x=np.arange(subX), 
zeros=np.zeros(subX), o1=pctline_o1, o2=pctline_o2, o3=pctline_o3)) + data = {f'pct_{n}': pct for n, pct in enumerate(pctlines)} + data.update({'x': np.arange(subX), 'zeros': np.zeros(subX)}) + rsource = ColumnDataSource(data=data) # Make plot rfig = figure(title='Target Contamination', width=900, height=200, match_aspect=True, tools=tools, x_range=fig.x_range) - rfig.line(np.arange(subX), pctline_o1, color='blue', legend_label='Order 1') - glyph1 = VArea(x='x', y1='zeros', y2='o1', fill_color="blue", fill_alpha=0.3) - rfig.add_glyph(rsource, glyph1) - if aperture.AperName not in ['NIS_SUBSTRIP96']: - rfig.line(np.arange(subX), pctline_o2, color='red', legend_label='Order 2') - glyph2 = VArea(x='x', y1='zeros', y2='o2', fill_color="red", fill_alpha=0.3) - rfig.add_glyph(rsource, glyph2) - rfig.line(np.arange(subX), pctline_o3, color='green', legend_label='Order 3') - glyph3 = VArea(x='x', y1='zeros', y2='o3', fill_color="green", fill_alpha=0.3) - rfig.add_glyph(rsource, glyph3) - rfig.y_range = Range1d(0, 1)#min(1, max(pctline_o1.max(), pctline_o2.max(), pctline_o3.max()))) + colors = ['blue', 'red', 'green', 'cyan', 'dodgerblue', 'purple', 'orange', 'lime', 'yellow', 'magenta'] + trace_names = inst['trace_names'] + for n in np.arange(n_traces): + rfig.line('x', f'pct_{n}', color=colors[n], legend_label=trace_names[n], source=copy(rsource)) + glyph = VArea(x='x', y1='zeros', y2=f'pct_{n}', fill_color=colors[n], fill_alpha=0.3) + rfig.add_glyph(copy(rsource), glyph) + rfig.y_range = Range1d(0, 1) #min(1, max(pctline_o1.max(), pctline_o2.max(), pctline_o3.max()))) rfig.yaxis.axis_label = 'Contam / Total Counts' rfig.xaxis.axis_label = 'Detector Column' @@ -798,54 +926,17 @@ def calc_v3pa(V3PA, stars, aperture, data=None, x_sweet=2885, y_sweet=1725, c0x0 # Plot grid gp = gridplot([[fig], [rfig]]) + if logging: + log_checkpoint(f'Finished making plot of size {len(json.dumps(json_item(gp))) / 1e6:.2f}MB') + log_checkpoint(f'Total calculation time: 
{(datetime.datetime.now() - start_time).total_seconds():.2f}s') + return result, gp return result -def plot_traces(star_table, fig, color='red'): - """ - PLot the trace locations of all the stars in the table - - Parameters - ---------- - star_table: astropy.table.Table - The table of stars - fig: bokeh.plotting.figure.Figure - The figure to plot on - - Returns - ------- - fig - The figure - """ - - # Trace extends in dispersion direction further than 2048 subarray edges - blue_ext = 150 - red_ext = 200 - - # Get the new x-ranges - xr0 = np.linspace(-blue_ext, 2048 + red_ext, 1000) - xr1 = np.linspace(-blue_ext, 1820 + red_ext, 1000) - xr2 = np.linspace(-blue_ext, 1130 + red_ext, 1000) - - # Add the y-intercept to the c0 coefficient - polys = SOSS_TRACE_COEFFS - yr0 = np.polyval(polys[0], xr0) - yr1 = np.polyval(polys[1], xr1) - yr2 = np.polyval(polys[2], xr2) - - for idx, star in enumerate(star_table): - # Order 1/2/3 location relative to order 0 - fig.line(xr0 + star['xord1'], yr0 + star['yord1'], color=color, line_dash='solid' if idx == 0 else 'dashed') - fig.line(xr1 + star['xord1'], yr1 + star['yord1'], color=color, line_dash='solid' if idx == 0 else 'dashed') - fig.line(xr2 + star['xord1'], yr2 + star['yord1'], color=color, line_dash='solid' if idx == 0 else 'dashed') - - return fig - - -def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_jobs=-1, plot=False, multi=True, verbose=True): - +def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_jobs=-1, interpolate=False, plot=False, + title='My Target', multi=True, verbose=True): """Produce a contamination field simulation at the given sky coordinates Parameters @@ -862,6 +953,10 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ The target epoch year of the observation, e.g. 
'2025' n_jobs: int Number of cores to use (-1 = All) + interpolate: bool + Skip every other PA and interpolate to speed up calculation + plot: bool + Return a plot Returns ------- @@ -878,6 +973,11 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ ra, dec = 91.872242, -25.594934 targframe, starcube, results = fs.field_simulation(ra, dec, 'NIS_SUBSTRIP256') """ + # Initialize logging + open('contam_tool.log', 'w').close() + start_time = datetime.datetime.now() + log_checkpoint(f'Logging in {log_file}...') + # Check for contam tool data check_for_data('exoctk_contam') @@ -905,7 +1005,7 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ aper.mincol, aper.maxcol = cols.min(), cols.max() # Find stars in the vicinity - stars = find_sources(ra, dec, target_date=target_date, verbose=verbose) + stars = find_sources(ra, dec, target_date=target_date, verbose=False) # Add stars manually if isinstance(binComp, dict): @@ -916,6 +1016,8 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ if n_jobs == -1 or n_jobs > max_cores: n_jobs = max_cores + log_checkpoint(f'Found {len(stars)} sources in the neighborhood') + # Get full list from ephemeris ra_hms, dec_dms = re.sub('[a-z]', ':', targetcrd.to_string('hmsdms')).split(' ') goodPAs = get_exoplanet_positions(ra_hms, dec_dms, in_FOR=True) @@ -939,12 +1041,18 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ good_group_bounds = [(min(grp), max(grp)) for grp in good_groups] goodPA_list = np.concatenate([np.arange(grp[0], grp[1]+1) for grp in good_group_bounds]).ravel() + # Try to speed it up by doing every other visible PA and then interpolating + # if interpolate: + + + log_checkpoint(f'Found {len(goodPA_ints)}/360 visible position angles to check') + # Flatten list and check against 360 angles to get all bad PAs # badPA_list = [pa for pa in pa_list if pa not in goodPA_list] # Time it if verbose: - 
print('Calculating target contamination from {} neighboring sources in position angle ranges {}...'.format(len(stars), good_group_bounds)) + print('Calculating target contamination from {} neighboring sources in position angle ranges {}...'.format(len(stars), [(int(rng[0]), int(rng[1])) for rng in good_group_bounds])) start = time.time() # Calculate contamination of all stars at each PA @@ -956,7 +1064,7 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ # And by multiprocessing end them? if multi: pl = pool.ThreadPool(n_jobs) - func = partial(calc_v3pa, stars=stars, aperture=aper, plot=False, verbose=False) + func = partial(calc_v3pa, stars=stars, aperture=aper, plot=False, verbose=False, logging=False) results = pl.map(func, goodPA_list) pl.close() pl.join() @@ -964,17 +1072,16 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ else: results = [] for pa in goodPA_list: - result = calc_v3pa(pa, stars=stars, aperture=aper, plot=False, verbose=False) + result = calc_v3pa(pa, stars=stars, aperture=aper, plot=False, verbose=False, logging=False) results.append(result) + log_checkpoint('Finished all calc_v3pa calculations') + # We only need one target frame frames - targframe_o1 = np.asarray(results[0]['target_o1']) - targframe_o2 = np.asarray(results[0]['target_o2']) - targframe_o3 = np.asarray(results[0]['target_o3']) - targframe = [targframe_o1, targframe_o2, targframe_o3] + targframes = [np.asarray(trace) for trace in results[0]['target_traces']] # Make sure starcube is of shape (PA, rows, cols) - starcube = np.zeros((360, targframe_o1.shape[0], targframe_o1.shape[1])) + starcube = np.zeros((360, targframes[0].shape[0], targframes[0].shape[1])) # Make the contamination plot for result in results: @@ -983,11 +1090,29 @@ def field_simulation(ra, dec, aperture, binComp=None, target_date=Time.now(), n_ if verbose: print('Contamination calculation complete: {} {}'.format(round(time.time() - start, 3), 
's')) + log_checkpoint('Bundled up all the results.') + # Make contam plot if plot: - contam_slider_plot(results, plot=plot) - return targframe, starcube, results + # Get bad PA list from missing angles between 0 and 360 + badPAs = [j for j in np.arange(0, 360) if j not in goodPA_list] + + # Make old contam plot + starcube_targ = np.zeros((362, 2048, 96 if aperture == 'NIS_SUBSTRIP96' else 256)) + starcube_targ[0, :, :] = (targframes[0]).T[::-1, ::-1] + starcube_targ[1, :, :] = (targframes[1]).T[::-1, ::-1] + starcube_targ[2:, :, :] = starcube.swapaxes(1, 2)[:, ::-1, ::-1] + contam_plot = cf.contam(starcube_targ, aperture, targetName=title, badPAs=badPAs) + + # # Slider plot + # contam_plot = contam_slider_plot(results, plot=False) + + return targframes, starcube, results, contam_plot + + else: + + return targframes, starcube, results def contam_slider_plot(contam_results, threshold=0.05, plot=False): @@ -1110,7 +1235,7 @@ def contam_slider_plot(contam_results, threshold=0.05, plot=False): return layout -def get_order0(aperture): +def get_order0(aperture, teff, stype='STAR', verbose=False): """Get the order 0 image for the given aperture Parameters @@ -1123,21 +1248,39 @@ def get_order0(aperture): np.ndarray The 2D order 0 image """ - # Get file - # TODO: Add order 0 files for other modes - if 'NIS' in aperture: - filename = 'NIS_order0.npy' + if 'DHS' in aperture: + trace = np.zeros((50,50)) + else: - # Get the path to the trace files - trace_path = os.path.join(os.environ['EXOCTK_DATA'], 'exoctk_contam/order0/{}'.format(filename)) + if stype == 'STAR': + + # Get the path to the trace files + traces_path = os.path.join(os.environ['EXOCTK_DATA'], f'exoctk_contam/order0/NIS_order0_*.npy') + + # Glob the file names + trace_files = glob.glob(traces_path) + + # Get closest Teff + teffs = np.array([int(os.path.basename(file).split('_')[-1][:-4]) for file in trace_files]) + trace_file = trace_files[np.argmin((teffs - teff)**2)] + if verbose: + print(f'Fetching {aperture} 
{teffs[np.argmin((teffs - teff)**2)]}K trace from {trace_file}') - # Make frame - trace = np.load(trace_path) + # Make frame + trace = np.load(trace_file) + + else: + + # Get stand-in for galaxy order 0 + gal_path = os.path.join(os.environ['EXOCTK_DATA'], f'exoctk_contam/order0/NIS_gal_order0.npy') + if verbose: + print('Fetching {} galaxy trace from {}'.format(aperture, gal_path)) + trace = np.load(gal_path) return trace -def get_trace(aperture, teff, stype, verbose=False): +def get_trace(aperture, teff, stype, verbose=False, plot=False): """Get the trace for the given aperture at the given temperature Parameters @@ -1148,14 +1291,27 @@ def get_trace(aperture, teff, stype, verbose=False): The temperature [K] stype: str The source type, ['STAR', 'GALAXY'] + verbose: bool + Print statements + plot: bool + Plot the trace Returns ------- np.ndarray The 2D trace """ + if 'DHS_F322W2' in aperture: + aperpath = 'NRCA5_GRISM256_F322W2' + elif 'DHS_F444W' in aperture: + aperpath = 'NRCA5_GRISM256_F444W' + elif 'NIS' in aperture: + aperpath = 'NIS_SUBSTRIP256' + else: + aperpath = aperture + # Get the path to the trace files - traces_path = os.path.join(os.environ['EXOCTK_DATA'], 'exoctk_contam/traces/{}/*.fits'.format('NIS_SUBSTRIP256' if 'NIS' in aperture else aperture)) + traces_path = os.path.join(os.environ['EXOCTK_DATA'], f'exoctk_contam/traces/{aperpath}/*.fits') # Glob the file names trace_files = glob.glob(traces_path) @@ -1178,23 +1334,43 @@ def get_trace(aperture, teff, stype, verbose=False): traceo2[traceo2 < 1] = 0 traceo3[traceo3 < 1] = 0 + # 1.5 scaling based on comparison with observational data + obs_scale = 1.5 + traces = [traceo1 * obs_scale, traceo2 * obs_scale, traceo3 * obs_scale] + if stype == 'GALAXY': # Just mask trace area - traceo1, traceo2, traceo2 = SOSS_trace_mask(aperture) + traces = NIRISS_SOSS_trace_mask(aperture) + + elif 'NRCA5' in aperture: + + # Get the trace and replace the NaN values + trace = fits.getdata(file)[0] + trace = 
replace_NaNs(trace) - trace = [traceo1, traceo2, traceo3] + # Put the trace in each of the DHS trace positions + traces = [] + for stripe in DHS_STRIPES[aperture].values(): + y, x = int((stripe['y1'] + stripe['y0']) / 2.), int((stripe['x1'] + stripe['x0']) / 2.) + dhs_trace = add_array_at_position(np.zeros((4257, 4257)), trace, x, y, centered=True) + traces.append(dhs_trace) + + if stype == 'GALAXY': + traces = NIRCam_DHS_trace_mask(aperture, plot=False) + + traces = [trace[APERTURES[aperture]['subarr_y'][1]:APERTURES[aperture]['subarr_y'][2], :] for trace in traces] else: - trace = [fits.getdata(file)] + traces = [fits.getdata(file)] - # # Expand to SUBSTRIP256 to FULL frame for NIS_SOSSFULL - # if aperture == 'NIS_SOSSFULL': - # full_trace = np.zeros((2301, 2301)) - # full_trace[:, -257:] = trace - # trace = full_trace + if plot: + f = figure(width=900, height=450) + final = np.sum(traces, axis=0) + f.image([final], x=APERTURES[aperture]['subarr_x'][0], y=APERTURES[aperture]['subarr_y'][1], dw=final.shape[1], dh=final.shape[0]) + show(f) - return trace + return traces def old_plot_contamination(targframe_o1, targframe_o2, targframe_o3, starcube, wlims, badPAs=[], title=''): @@ -1289,6 +1465,5 @@ def old_plot_contamination(targframe_o1, targframe_o2, targframe_o3, starcube, w if __name__ == '__main__': - ra, dec = "04 25 29.0162", "-30 36 01.603" # Wasp 79 field_simulation(ra, dec, 'NIS_SUBSTRIP256') diff --git a/exoctk/contam_visibility/precompute.py b/exoctk/contam_visibility/precompute.py new file mode 100644 index 000000000..a589c7948 --- /dev/null +++ b/exoctk/contam_visibility/precompute.py @@ -0,0 +1,151 @@ +""" +Module to precompute the contamination for targets, store the data to file, and then retrieve results +with per-chunk zero-value masks. 
+ +Author: Joe Filippazzo +Date: 01/13/26 + +Example: +filename = "soss_contam.h5" +data = load_exoplanet(filename, "WASP-39b") + +print(data["target_trace"].shape) # (256, 2048) +print(data["contamination"].shape) # (360, 256, 2048) +print(len(data["plane_index"])) # number of non-zero planes +""" + +import h5py +import numpy as np +import requests + +from . import field_simulator as fs +from ..utils import get_target_data, get_canonical_name + + +def fetch_exoplanet_names_eaot(): + url = "https://catalogs.mast.stsci.edu/api/v0.1/eaot/search.json" + params = {"columns": "[Planet_Name]", "pagesize": 20000} + + resp = requests.get(url, params=params) + resp.raise_for_status() + data = resp.json() + names = [row[0] for row in data["data"] if row and row[0] is not None] + + return names + + +def generate_exoplanet_file(filename, test=True): + """ + Create HDF5 file with empty datasets for exoplanets. + + - target_trace: (256, 2048) + - contamination: (variable number of planes, 256, 2048), stored only for non-zero planes + - plane_index: which planes are stored + """ + planet_names = fetch_exoplanet_names_eaot() + count = 0 + + with h5py.File(filename, "w") as f: + for name in (planet_names[:3] if test else planet_names): + if name in f: + continue + + grp_name = name.strip().replace("/", "_") + grp = f.create_group(grp_name) + grp.attrs["name"] = name + + # Target trace + grp.create_dataset( + "target_trace", + shape=(3, 256, 2048), + dtype="float32", + compression="gzip", + compression_opts=4, + chunks=(1, 256, 2048), + ) + + # Contamination placeholder (0 planes initially) + grp.create_dataset( + "contamination", + shape=(0, 256, 2048), # variable length along first axis + maxshape=(None, 256, 2048), + dtype="float32", + compression="gzip", + compression_opts=4, + chunks=(1, 256, 2048), + ) + + # Plane index placeholder + grp.create_dataset( + "plane_index", + shape=(0,), + maxshape=(None,), + dtype="int16", + ) + + count += 1 + + print(f"Finished! 
Saved structure for {count}/{len(planet_names)} exoplanets to {filename}.")
+    return planet_names
+
+
+def save_exoplanet_data(filename, exoplanet_name, target_trace, contamination):
+    """
+    Save target trace and contamination (only non-zero planes) to HDF5 file.
+    """
+    grp_name = exoplanet_name.strip().replace("/", "_")
+
+    with h5py.File(filename, "r+") as f:
+        if grp_name not in f:
+            raise KeyError(f"Exoplanet '{exoplanet_name}' not found in {filename}")
+
+        grp = f[grp_name]
+
+        # --- Target trace (3,256,2048) ---
+        grp["target_trace"][:, :, :] = target_trace
+
+        # --- Sparse contamination ---
+        plane_index = np.where(contamination.any(axis=(1, 2)))[0]
+
+        if plane_index.size == 0:
+            # All-zero contamination
+            grp["contamination"].resize((0, 256, 2048))
+            grp["plane_index"].resize((0,))
+        else:
+            nonzero_planes = contamination[plane_index, :, :]
+
+            # Ensure 3D
+            if nonzero_planes.ndim == 2:
+                nonzero_planes = nonzero_planes[np.newaxis, :, :]
+
+            grp["contamination"].resize(nonzero_planes.shape)
+            grp["contamination"][:, :, :] = nonzero_planes
+
+            grp["plane_index"].resize(plane_index.shape)
+            grp["plane_index"][:] = plane_index
+
+        grp.attrs["filled"] = True
+
+    print(f"{exoplanet_name} saved ({len(plane_index)} contamination planes)")
+
+
+def generate_database(filename, aperture='NIS_SUBSTRIP256', test=True):
+    """
+    Compute the contamination data and save to the file
+    """
+    # Make the empty file
+    planet_names = generate_exoplanet_file(filename, test=True)
+
+    for targname in (planet_names[:3] if test else planet_names):
+
+        # Canonical name and get coordinates
+        targname = get_canonical_name(targname)
+        data, _ = get_target_data(targname)
+        ra_deg = data.get('RA')
+        dec_deg = data.get('DEC')
+
+        # Run contamination tool
+        target_traces, contamination, _ = fs.field_simulation(ra_deg, dec_deg, aperture, plot=False)
+
+        # Save data to file with mask and plane index
+        save_exoplanet_data(filename, targname, target_traces, contamination)
diff --git 
a/exoctk/exoctk_app/app_exoctk.py b/exoctk/exoctk_app/app_exoctk.py index a12e8ca70..292e206f1 100644 --- a/exoctk/exoctk_app/app_exoctk.py +++ b/exoctk/exoctk_app/app_exoctk.py @@ -463,24 +463,20 @@ def contam_visibility(): # Add a companion companion = None if comp_teff is not None and comp_mag is not None and comp_dist is not None and comp_pa is not None: - companion = {'name': 'Companion', 'ra': ra_deg, 'dec': dec_deg, 'teff': comp_teff, 'delta_mag': comp_mag, 'dist': comp_dist, 'pa': comp_pa} + companion = {'name': 'Companion', 'ra': ra_deg, 'dec': dec_deg, 'teff': comp_teff, + 'delta_mag': comp_mag, 'dist': comp_dist, 'pa': comp_pa} # Make field simulation - targframe, starcube, results = fs.field_simulation(ra_deg, dec_deg, form.inst.data, target_date=form.epoch.data, binComp=companion, plot=False, multi=False) + targframe, starcube, results, contam_plot = fs.field_simulation(ra_deg, dec_deg, form.inst.data, + target_date=form.epoch.data, + binComp=companion, + plot=True, + multi=False, + title=form.targname.data) # Make the plot # contam_plot = fs.contam_slider_plot(results) - # Get bad PA list from missing angles between 0 and 360 - badPAs = [j for j in np.arange(0, 360) if j not in [i['pa'] for i in results]] - - # Make old contam plot - starCube = np.zeros((362, 2048, 96 if form.inst.data=='NIS_SUBSTRIP96' else 256)) - starCube[0, :, :] = (targframe[0]).T[::-1, ::-1] - starCube[1, :, :] = (targframe[1]).T[::-1, ::-1] - starCube[2:, :, :] = starcube.swapaxes(1, 2)[:, ::-1, ::-1] - contam_plot = cf.contam(starCube, form.inst.data, targetName=form.targname.data, badPAs=badPAs) - else: # Get stars diff --git a/exoctk/notebooks/contamination_and_visibility_SOSS.ipynb b/exoctk/notebooks/contamination_and_visibility_SOSS.ipynb new file mode 100644 index 000000000..ae2327b67 --- /dev/null +++ b/exoctk/notebooks/contamination_and_visibility_SOSS.ipynb @@ -0,0 +1,295 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + 
"# ExoCTK Contamination and Visibility Tool Demo\n", + "This notebook contains general instructions on how to run the Contamination and Visibility Tool as well as some validation tests with on-sky data. The Web application for this tool can also be found at https://exoctk.stsci.edu/contam_visibility." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "# Imports\n", + "from astropy.io import fits\n", + "from astroquery.mast import Observations\n", + "from exoctk.contam_visibility import field_simulator as fs\n", + "from exoctk.contam_visibility import contamination_figure as cf\n", + "from bokeh.plotting import show\n", + "from bokeh.io import output_notebook\n", + "from hotsoss.plotting import plot_frame\n", + "# output_notebook()" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## Using the Contamination and Visibility Tool\n", + "The Contamination and Visibility Tool has three types of outputs, discussed below.\n", + "\n", + "### Simulate a field at a single position angle\n", + "The only necessary inputs to simulate the contamination in a given field at a particular position angle are the target coordinates, the instrument name, and the position angle of interest. We first search the neighborhood for sources with our RA and Dec values." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "# Find all sources (including the target) in the field of view\n", + "target = \"WASP-100\"\n", + "ra, dec = 68.95970886173, -64.02703718088\n", + "sources = fs.find_sources(ra, dec, verbose=False)" + ] + }, + { + "cell_type": "markdown", + "id": "4", + "metadata": {}, + "source": [ + "Next we can calculate the contamination for a given instrument at a given PA. PA values can range from (0, 360) and supported subarrays currently include 'NIS_SUBSTRIP256' and 'NIS_SUBSTRIP96'." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "# Calculate contamination\n", + "pa = 200\n", + "aperture = 'NIS_SUBSTRIP256'\n", + "result, plot = fs.calc_v3pa(pa, sources, aperture, plot=True, verbose=False)\n", + "\n", + "# Plot it!\n", + "show(plot)" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, + "source": [ + "In the top plot, the red lines indicate stars and the pink lines indicate extended sources. The solid lines indicate the target traces and the dashed lines indicate contaminant traces. The circles indicate 0th order contaminant traces. Hover over any glyph to see more information about the source.\n", + "\n", + "In the bottom plot, the shaded regions show the estimated contamination from neighboring sources in each column for each order trace.\n", + "\n", + "### Simulate the estimated contamination at all position angles\n", + "If you supply the `field_simulation` function with the RA and Dec of interest, most of the work is done for you and an estimate of the contamination at all position angles is shown. This can be used to quickly see which position angle ranges are suitable for your observations." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "# Run the field simulation for all PAs\n", + "targframe, contamframe, results = fs.field_simulation(ra, dec, aperture, plot=False, multi=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "# Get bad PA list from missing angles between 0 and 360\n", + "badPAs = [j for j in np.arange(0, 360) if j not in [i['pa'] for i in results]]\n", + "\n", + "# Make old contam plot. 
Some reshaping required.\n",
+    "simframe = np.zeros((362, 2048, 256))\n",
+    "simframe[0, :, :] = (targframe[0]).T[::-1, ::-1]\n",
+    "simframe[1, :, :] = (targframe[1]).T[::-1, ::-1]\n",
+    "simframe[2:, :, :] = contamframe.swapaxes(1, 2)[:, ::-1, ::-1]\n",
+    "contam_plot = cf.contam(simframe, subarray, targetName=target, badPAs=badPAs)\n",
+    "show(contam_plot)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9",
+   "metadata": {},
+   "source": [
+    "### Explore contamination for all valid position angles\n",
+    "You can also simulate each field individually for a closer visual inspection with an interactive plot. We'll just use the results from above. Drag the slider to see the simulated contamination at each visible PA. The grey areas indicate position angles where the target is not visible."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "10",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "interactive_plot = fs.contam_slider_plot(results)\n",
+    "show(interactive_plot)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "11",
+   "metadata": {},
+   "source": [
+    "## Validation Tests\n",
+    "Below are some tests to show how well the Contamination and Visibility Tool is predicting the locations of contaminant traces. The `calc_v3pa` function also accepts on-sky SOSS data as a quality check for the output simulations."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "12",
+   "metadata": {},
+   "source": [
+    "### Download the data\n",
+    "First, let's download the data we need to test the Contamination and Visibility Tool from MAST. Let's use WASP-39 as our example. Feel free to change it to your target of choice! The code accepts only a single 2D frame at a time so let's just use the 2D `_rate.fits` data product."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "13",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Specify the observation\n",
+    "observation = Observations.query_criteria(instrument_name='NIRISS/SOSS', filters='CLEAR;GR700XD', target_name='WASP-39')\n",
+    "\n",
+    "# Get the data products\n",
+    "data_products = Observations.get_product_list(observation)\n",
+    "\n",
+    "# Filter them to get rates only\n",
+    "rate_file = Observations.filter_products(data_products, dataproduct_type='spectrum', productType='SCIENCE', productSubGroupDescription='RATE')[2]\n",
+    "\n",
+    "# Download the data\n",
+    "dl_table = Observations.download_products(rate_file)\n",
+    "\n",
+    "# Get the path to the rate file\n",
+    "rate_path = dl_table['Local Path'][0]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "14",
+   "metadata": {},
+   "source": [
+    "### Load the data into the session\n",
+    "Now we can read in the data and get the target RA, Dec, and position angle we want to replicate."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "15",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Observation parameters\n",
+    "rate_data = fits.getdata(rate_path)\n",
+    "ra = fits.getval(rate_path, 'TARG_RA')\n",
+    "dec = fits.getval(rate_path, 'TARG_DEC')\n",
+    "pa = int(fits.getval(rate_path, 'GS_V3_PA'))\n",
+    "obsdate = fits.getval(rate_path, 'DATE-BEG')[:4]\n",
+    "subarray = 'NIS_' + fits.getval(rate_path, 'SUBARRAY')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "16",
+   "metadata": {},
+   "source": [
+    "### Find all nearby sources\n",
+    "Next, we have to search the neighborhood for our target as well as any potentially contaminating sources. The `find_sources` function also accepts a `target_date` to specify the calendar year when the observations were taken so that proper motions of the sources can be accounted for. 
It is set to the current year by default but we will supply a 4-digit year from the observation `obsdate` so we know it matches." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": {}, + "outputs": [], + "source": [ + "# Find all sources (including the target) in the field of view\n", + "sources = fs.find_sources(ra, dec, target_date=obsdate, verbose=False)" + ] + }, + { + "cell_type": "markdown", + "id": "18", + "metadata": {}, + "source": [ + "### Run the Contamination and Visibility Tool\n", + "Now that we have our data and observation parameters, we can run the tool." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "19", + "metadata": {}, + "outputs": [], + "source": [ + "# Calculate contamination\n", + "result_onsky, plot_onsky = fs.calc_v3pa(pa, sources, subarray, data=rate_data, plot=True, verbose=False)\n", + "\n", + "# Plot it!\n", + "show(plot_onsky)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "# For comparison, let's see what the simulation looks like\n", + "result_sim, plot_sim = fs.calc_v3pa(pa, sources, subarray, plot=True, verbose=False)\n", + "\n", + "# Plot it!\n", + "show(plot_sim)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "exoctk-3.11", + "language": "python", + "name": "exoctk-3.11" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/exoctk/tests/test_utils.py b/exoctk/tests/test_utils.py index a18f2f3b6..dbc5d53ec 100644 --- a/exoctk/tests/test_utils.py +++ b/exoctk/tests/test_utils.py @@ 
-28,7 +28,7 @@ # Include 3 actual planets with data and one fake planet to xfail. TEST_DATA = [('HD108236f', {'canonical_name': 'HD 108236 f', 'Fe/H': -0.28, 'Teff': 5660.0, 'stellar_gravity': 4.49, 'transit_duration': 0.13625, 'RA': 186.5740627, 'DEC': -51.3630519}), ('NGTS-14Ab', {'canonical_name': 'NGTS-14 A b', 'Fe/H': 0.1, 'Teff': 5187.0, 'stellar_gravity': 4.2, 'transit_duration': 0.09333333333333334, 'RA': 328.5174799, 'DEC': -38.3774193}), - ('2MASSJ10193800-0948225b', {'canonical_name': 'WASP-43 b', 'Fe/H': -0.01, 'Teff': 4400.0, 'stellar_gravity': 4.49, 'transit_duration': 0.0513, 'RA': 154.9081869, 'DEC': -9.8064431}), + ('2MASSJ10193800-0948225b', {'canonical_name': 'WASP-43 b', 'Fe/H': -0.05, 'Teff': 4400.0, 'stellar_gravity': 4.65, 'transit_duration': 0.0526235, 'RA': 154.9081869, 'DEC': -9.8064431}), pytest.param('djgfjhsg', {'canonical_name': 'sfghsfkjg', 'Fe/H': -999, 'Teff': -999, 'stellar_gravity': -999, 'transit_duration': -999, 'RA': -999, 'DEC': -999}, marks=pytest.mark.xfail)] MEDFILT_DATA = [(np.array([1, 1, 8, 12, 2, 10, 5, 2, 5, 2]), 4), diff --git a/exoctk/utils.py b/exoctk/utils.py index 1d2c6a341..3b9f4afcc 100755 --- a/exoctk/utils.py +++ b/exoctk/utils.py @@ -12,13 +12,16 @@ import shutil import urllib import sys -from importlib.metadata import version from astropy.io import fits import bokeh.palettes as bpal from scipy.interpolate import RegularGridInterpolator +from scipy.ndimage import generic_filter import numpy as np from svo_filters import svo +from bokeh.plotting import figure, show + +from ._version import __version__ try: from .throughputs import JWST_THROUGHPUTS @@ -35,8 +38,7 @@ # Supported profiles PROFILES = ['linear', 'quadratic', 'square-root', 'logarithmic', 'exponential', '3-parameter', '4-parameter'] -VERSION = version('exoctk') -PATCHVER = 'v' + '.'.join(VERSION.split('.')[:2]) # So we don't have to update EXOCTK_DATA for nano releases +PATCHVER = 'v' + '.'.join(__version__.split('.')[:2]) # So we don't have to 
update EXOCTK_DATA for nano releases DATA_URLS = { 'exoctk_contam': [f'https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/exoctk_contam{PATCHVER}.tar.gz'], 'groups_integrations': [f'https://data.science.stsci.edu/redirect/JWST/ExoCTK/compressed/groups_integrations{PATCHVER}.tar.gz'], @@ -95,6 +97,68 @@ def enablePrint(): sys.stdout = sys.__stdout__ +def add_array_at_position(Arr, B, x, y, centered=False, plot=False): + """ + Adds array B to array Arr at position (x, y). + If centered=False, (x, y) is the lower-left corner of B in Arr. + If centered=True, (x, y) is the center of B in Arr. + Any part of B that goes outside the bounds of Arr is trimmed. + + Parameters + ---------- + Arr: sequence + The array being added to + B: sequence + The array being added to Arr + x: int + The x-index of Arr at which to add B + y: int + The y-index of Arr at which to add B + centered: bool + Add center of B at (x, y) rather than using lower left corner of B + plot: bool + Plot the final image for visual inspection + + Returns + ------- + array + The final array + """ + A = Arr.copy() # Avoid modifying original + hA, wA = A.shape + hB, wB = B.shape + + if centered: + x -= wB // 2 + y -= hB // 2 + + # Determine region of A that will be affected + x0 = max(x, 0) + y0 = max(y, 0) + x1 = min(x + wB, wA) + y1 = min(y + hB, hA) + + # If B doesn't overlap A, return A unchanged + if x0 >= x1 or y0 >= y1: + return A + + # Corresponding region in B + Bx0 = x0 - x + By0 = y0 - y + Bx1 = Bx0 + (x1 - x0) + By1 = By0 + (y1 - y0) + + A[y0:y1, x0:x1] += B[By0:By1, Bx0:Bx1] + + if plot: + p = figure(x_range=(0, A.shape[1]), y_range=(0, A.shape[0]), + tools="", toolbar_location=None, match_aspect=True) + p.image(image=[A], x=0, y=0, dw=A.shape[1], dh=A.shape[0], palette="Greys256") + show(p) + + return A + + def build_target_url(target_name): """Build restful api url based on target name. 
@@ -652,6 +716,49 @@ def medfilt(x, window_len): return np.median(y[window_len - 1: -window_len + 1], axis=1) +def nanmean_filter(values): + """ + Custom filter function that replaces the center value with the mean of the surrounding + 8 values, ignoring NaNs. If the center is not NaN, it is returned unchanged. If all + neighbors are NaN, the center remains NaN. + + Parameters: + values (array-like): Flattened 3x3 neighborhood of values. + + Returns: + float: The replacement value for the center pixel. + """ + center = values[len(values) // 2] + if np.isnan(center): + neighbors = np.delete(values, len(values) // 2) # Remove center + mean = np.nanmean(neighbors) + return mean if not np.isnan(mean) else center + else: + return center + + +def replace_NaNs(arr, max_iter=100): + """ + Recursively replaces NaN values in a 2D NumPy array with the mean of their 8 + surrounding neighbors, ignoring NaNs. Continues until all NaNs are filled or + the maximum number of iterations is reached. + + Parameters: + arr (np.ndarray): 2D NumPy array containing NaNs to fill. + max_iter (int): Maximum number of iterations to attempt (default: 100). + + Returns: + np.ndarray: A copy of the array with NaNs filled. 
+ """ + result = arr.copy() + for _ in range(max_iter): + if not np.isnan(result).any(): + break + updated = generic_filter(result, nanmean_filter, size=3, mode='constant', cval=np.nan) + result[np.isnan(result)] = updated[np.isnan(result)] + return result + + def rebin_spec(spec, wavnew, oversamp=100, plot=False): """ Rebin a spectrum to a new wavelength array while preserving diff --git a/setup.py b/setup.py index d6297b00e..087cae8ea 100755 --- a/setup.py +++ b/setup.py @@ -1,7 +1,26 @@ -import numpy as np +import os +import re from setuptools import setup +import numpy as np + + +def get_version(): + here = os.path.abspath(os.path.dirname(__file__)) + version_path = os.path.join(here, "exoctk", "_version.py") + + with open(version_path, encoding="utf-8") as f: + content = f.read() + + match = re.search(r'^__version__ = ["\']([^"\']+)["\']', content, re.M) + if not match: + raise RuntimeError("Unable to find __version__ string in _version.py") + + return match.group(1) + setup( + name="exoctk", + version=get_version(), include_dirs=[np.get_include()], - version='1.2.6.4', + # You likely have more fields to include from pyproject.toml )