From 2fff257d81139fe50c9cfb49eeed1588f8b0b24a Mon Sep 17 00:00:00 2001 From: Thomas Williams Date: Thu, 19 Feb 2026 10:27:46 +0000 Subject: [PATCH] Wrap into pip-installable package - Fix step occasionally being 0 in noise cube generation - Add dependabot, and build actions to ensure package installs correctly on Python matrix - Add CODEOWNERS, so PRs get assigned automatically - Update CITATION.md to CITATION.cff, and format correctly - Rename LICENSE.rst to just LICENSE - Rewrite pyproject.toml to allow pip-installation - Expand list of authors/maintainers/citation.cff - Rename README.rst to README.md - Significant reformatting and updating to reflect packaging - Include details on chunked imaging - Remove setup.cfg/setup.py - Remove unused _astropy_init.py - Remove unused conftest.py - Remove unused data/ directory in phangsPipeline - Remove unused and very outdated PHANGSPipelineReadme.md in phangsPipeline - Moved casaBlankCleanRecipe to scripts, and renamed - Refactored versioning throughout, now pulls automatically from the package - Fully align CASA version throughout - General import tidy-up - Move various example scripts into scripts/ directory - Consolidate run script into run_pipeline_phangs-alma.py - Update README.md to include link to v3.2 - Move links from akleroy->PhangsTeam throughout - Add CHANGES.rst, and workflow to make sure CHANGES.rst has been edited on a PR - Address comments from @e-koch --- .github/dependabot.yml | 20 + .github/workflows/build.yml | 36 + .github/workflows/check-changelog.yml | 16 + .github/workflows/publish.yml | 36 + CHANGES.rst | 4 + CITATION.cff | 133 + CITATION.md | 23 - CODEOWNERS | 2 + LICENSE.rst => LICENSE | 0 README.md | 152 + README.rst | 125 - phangsPipeline/PHANGSPipelineREADME.md | 118 - phangsPipeline/__init__.py | 57 +- phangsPipeline/_astropy_init.py | 17 - phangsPipeline/casaCubeRoutines.py | 40 +- phangsPipeline/casaFeatherRoutines.py | 26 +- phangsPipeline/casaImagingRoutines.py | 24 +- 
.../casaLegacySingleDishRoutines.py | 468 +-- phangsPipeline/casaMaskingRoutines.py | 37 +- phangsPipeline/casaMosaicRoutines.py | 22 +- phangsPipeline/casaRoutineTests.py | 22 +- phangsPipeline/casaSingleDishALMAWrapper.py | 20 +- phangsPipeline/casaStuff.py | 288 +- phangsPipeline/casaVisRoutines.py | 60 +- .../{casa_check.py => check_imports.py} | 26 +- phangsPipeline/clean_call.py | 6 +- phangsPipeline/conftest.py | 59 - phangsPipeline/data/README.rst | 6 - phangsPipeline/ftplane_convolution.py | 12 +- phangsPipeline/handlerAlmaDownload.py | 1666 ++++---- phangsPipeline/handlerDerived.py | 3133 +++++++-------- phangsPipeline/handlerImaging.py | 50 +- phangsPipeline/handlerImagingChunked.py | 36 +- phangsPipeline/handlerKeys.py | 11 +- phangsPipeline/handlerPostprocess.py | 3540 ++++++++--------- phangsPipeline/handlerRelease.py | 23 +- phangsPipeline/handlerSingleDish.py | 635 ++- phangsPipeline/handlerTemplate.py | 5 +- phangsPipeline/handlerTestImaging.py | 5 +- phangsPipeline/handlerVis.py | 1931 +++++---- phangsPipeline/pipelineLogger.py | 18 +- phangsPipeline/pipelineVersion.py | 14 - phangsPipeline/scConvolution.py | 5 +- phangsPipeline/scDerivativeRoutines.py | 17 +- phangsPipeline/scMaskingRoutines.py | 20 +- phangsPipeline/scMoments.py | 2 +- phangsPipeline/scNoiseRoutines.py | 17 +- phangsPipeline/scStackingRoutines.py | 6 +- phangsPipeline/statsHandler.py | 26 +- phangsPipeline/taskSDIntImaging.py | 16 +- phangsPipeline/tests/__init__.py | 4 - phangsPipeline/utilsFieldSelection.py | 16 +- phangsPipeline/utilsFilenames.py | 3 +- phangsPipeline/utilsImages.py | 13 +- phangsPipeline/utilsKeyReaders.py | 8 +- phangsPipeline/utilsLines.py | 2 +- phangsPipeline/utilsLists.py | 1 - phangsPipeline/utilsSingleDish.py | 11 +- phangsPipeline/utilsTestImagingPlots.py | 2 +- phangsPipeline/utilsTheoreticalNoise.py | 16 - pyproject.toml | 81 +- run_casa_pipeline_for_tp.py | 71 - run_casa_pipeline_phangs-alma.py | 188 - run_derived_pipeline_phangs-alma.py | 204 - 
run_pipeline_phangs-alma.py | 292 ++ .../blank_clean_recipes.py | 2 +- .../calctimeonsource.py | 8 +- .../example_make_products.py | 0 .../examples_on_clusters}/README.rst | 0 .../imaging_in_chunks/README.rst | 0 .../job_gather_chunks_to_cube.sh | 0 .../jobarray_imaging_per_chunk.sh | 0 .../run_casa_find_nchunks.py | 0 .../run_casa_gather_chunks.py | 0 .../run_casa_imaging_perchunk.py | 0 .../imaging_per_target/README.rst | 0 .../create_imaging_job_config_files.py | 0 ...jobarray_field_line_staging_imaging_job.sh | 0 .../keys_hydra/cleanmask_key.txt | 0 .../keys_hydra/config_definitions.txt | 0 .../line_staging_imaging.all.1.jobconfig.txt | 0 .../keys_hydra/continuum_mosaic.clean | 0 .../keys_hydra/cube_mosaic.clean | 0 .../keys_hydra/derived_key.txt | 0 .../imaging_per_target/keys_hydra/dir_key.txt | 0 .../keys_hydra/dir_key_mosaic.txt | 0 .../keys_hydra/distance_key.txt | 0 .../keys_hydra/imaging_recipes.txt | 0 .../keys_hydra/linearmosaic_definitions.txt | 0 .../keys_hydra/master_key.txt | 0 .../keys_hydra/moment_key.txt | 0 .../keys_hydra/ms_file_key.txt | 0 .../keys_hydra/overrides.txt | 0 .../keys_hydra/singledish_key.txt | 0 .../keys_hydra/target_definitions.txt | 0 ...pipeline_stage_image_permosaic_jobarray.py | 0 .../print_uv_ranges.py | 2 + .../run_casa_imaging_chunked_example.py | 0 .../run_casa_imaging_chunked_ngc1097.py | 0 .../run_casa_test_imaging_example.py | 0 setup.cfg | 88 - setup.py | 78 - 102 files changed, 6608 insertions(+), 7513 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/build.yml create mode 100644 .github/workflows/check-changelog.yml create mode 100644 .github/workflows/publish.yml create mode 100644 CHANGES.rst create mode 100644 CITATION.cff delete mode 100644 CITATION.md create mode 100644 CODEOWNERS rename LICENSE.rst => LICENSE (100%) create mode 100644 README.md delete mode 100644 README.rst delete mode 100644 phangsPipeline/PHANGSPipelineREADME.md delete mode 100644 
phangsPipeline/_astropy_init.py rename phangsPipeline/{casa_check.py => check_imports.py} (54%) delete mode 100644 phangsPipeline/conftest.py delete mode 100644 phangsPipeline/data/README.rst delete mode 100644 phangsPipeline/pipelineVersion.py delete mode 100644 phangsPipeline/tests/__init__.py delete mode 100644 run_casa_pipeline_for_tp.py delete mode 100644 run_casa_pipeline_phangs-alma.py delete mode 100644 run_derived_pipeline_phangs-alma.py create mode 100644 run_pipeline_phangs-alma.py rename phangsPipeline/casaBlankCleanRecipes.py => scripts/blank_clean_recipes.py (89%) rename {phangsPipeline => scripts}/calctimeonsource.py (91%) rename example_make_products.py => scripts/example_make_products.py (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/README.rst (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_in_chunks/README.rst (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_in_chunks/job_gather_chunks_to_cube.sh (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_in_chunks/jobarray_imaging_per_chunk.sh (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_in_chunks/run_casa_find_nchunks.py (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_in_chunks/run_casa_gather_chunks.py (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_in_chunks/run_casa_imaging_perchunk.py (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/README.rst (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/create_imaging_job_config_files.py (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/jobarray_field_line_staging_imaging_job.sh (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/cleanmask_key.txt (100%) rename {examples_on_clusters => 
scripts/examples_on_clusters}/imaging_per_target/keys_hydra/config_definitions.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/config_lines/line_staging_imaging.all.1.jobconfig.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/continuum_mosaic.clean (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/cube_mosaic.clean (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/derived_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/dir_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/dir_key_mosaic.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/distance_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/imaging_recipes.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/linearmosaic_definitions.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/master_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/moment_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/ms_file_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/overrides.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/singledish_key.txt (100%) rename {examples_on_clusters => scripts/examples_on_clusters}/imaging_per_target/keys_hydra/target_definitions.txt (100%) rename {examples_on_clusters => 
scripts/examples_on_clusters}/imaging_per_target/run_casa_pipeline_stage_image_permosaic_jobarray.py (100%) rename {phangsPipeline => scripts}/print_uv_ranges.py (98%) rename run_casa_imaging_chunked_example.py => scripts/run_casa_imaging_chunked_example.py (100%) rename run_casa_imaging_chunked_ngc1097.py => scripts/run_casa_imaging_chunked_ngc1097.py (100%) rename run_casa_test_imaging_example.py => scripts/run_casa_test_imaging_example.py (100%) delete mode 100644 setup.cfg delete mode 100755 setup.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..97e364a5 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,20 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + target-branch: "master" + schedule: + interval: "weekly" + + # Maintain dependencies for pip + - package-ecosystem: "pip" + directory: "/" + target-branch: "master" + schedule: + interval: "weekly" + open-pull-requests-limit: 10 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..156514bc --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,36 @@ +name: Build + +on: + push: + branches: + - '*' + pull_request: + branches: + - master + +jobs: + job: + name: Build + runs-on: ubuntu-latest + strategy: + matrix: + # Versions listed at https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json + python-version: [ + "3.12", + ] + steps: + - uses: actions/checkout@v6 + with: + submodules: true + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v6 + with: + 
python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + pip install . + pip install .[casa] + - name: Build package + run: python -m build diff --git a/.github/workflows/check-changelog.yml b/.github/workflows/check-changelog.yml new file mode 100644 index 00000000..b3093c55 --- /dev/null +++ b/.github/workflows/check-changelog.yml @@ -0,0 +1,16 @@ +name: Check Changelog + +on: + pull_request: + types: [assigned, opened, synchronize, reopened, labeled, unlabeled] + branches: + - master + +jobs: + Check-Changelog: + name: Check Changelog Action + runs-on: ubuntu-latest + steps: + - uses: tarides/changelog-check-action@v3 + with: + changelog: CHANGES.rst diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..4380ad0f --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,36 @@ +name: Build and upload to PyPI + +on: [push, pull_request] + +jobs: + build_sdist_and_wheel: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: actions/setup-python@v6 + name: Install Python + with: + python-version: "3.12" + - name: Install build + run: python -m pip install build + - name: Build sdist + run: python -m build --sdist --wheel --outdir dist/ . 
+ - uses: actions/upload-artifact@v6 + with: + path: dist/* + +# upload_pypi: +# name: Upload to PyPI +# needs: [build_sdist_and_wheel] +# runs-on: ubuntu-latest +# if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') +# steps: +# - uses: actions/download-artifact@v7 +# with: +# name: artifact +# path: dist +# - uses: pypa/gh-action-pypi-publish@release/v1 +# with: +# user: __token__ +# password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/CHANGES.rst b/CHANGES.rst new file mode 100644 index 00000000..ea7e3f45 --- /dev/null +++ b/CHANGES.rst @@ -0,0 +1,4 @@ +4.0.0 (Unreleased) +================== + +- Initial pip-installable version (#292) diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000..5c70ee86 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,133 @@ +cff-version: 1.2.0 +message: "If you use this software, please cite it as below." +title: "phangsPipeline" +authors: +- family-names: "Leroy" + given-names: "Adam" +- family-names: "Hughes" + given-names: "Annie" +- family-names: "Liu" + given-names: "Daizhong" +- family-names: "Pety" + given-names: "Jerome" +- family-names: "Rosolowsky" + given-names: "Erik" +- family-names: "Saito" + given-names: "Toshiki" +- family-names: "Schinnerer" + given-names: "Eva" +- family-names: "Schruba" + given-names: "Andreas" +- family-names: "Usero" + given-names: "Antonio" +- family-names: "Faesi" + given-names: "Christopher" +- family-names: "Herrera" + given-names: "Cinthya" +- family-names: "Chevance" + given-names: "Melanie" +- family-names: "Hygate" + given-names: "Alexander" +- family-names: "Kepley" + given-names: "Amanda" +- family-names: "Koch" + given-names: "Eric" +- family-names: "Querejeta" + given-names: "Miguel" +- family-names: "Sliwa" + given-names: "Kazimierz" +- family-names: "Will" + given-names: "David" +- family-names: "Wilson" + given-names: "Christine" +- family-names: "Anand" + given-names: "Gagandeep" +- family-names: "Barnes" + given-names: "Ashley" +- 
family-names: "Belfiore" + given-names: "Francesco" +- family-names: "Beslic" + given-names: "Ivana" +- family-names: "Bigiel" + given-names: "Frank" +- family-names: "Blanc" + given-names: "Guillermo" +- family-names: "Bolatto" + given-names: "Alberto" +- family-names: "Boquien" + given-names: "Mederic" +- family-names: "Cao" + given-names: "Yixian" +- family-names: "Chandar" + given-names: "Rupali" +- family-names: "Chastenet" + given-names: "Jeremy" +- family-names: "Chiang" + given-names: "I-Da" +- family-names: "Congiu" + given-names: "Enrico" +- family-names: "Dale" + given-names: "Daniel" +- family-names: "Deger" + given-names: "Sinan" +- family-names: "den Brok" + given-names: "Jakob" +- family-names: "Eibensteiner" + given-names: "Cosima" +- family-names: "Emsellem" + given-names: "Eric" +- family-names: "Garcia-Rodriguez" + given-names: "Axel" +- family-names: "Kim" + given-names: "Jaeyeon" +- family-names: "Klessen" + given-names: "Ralf" +- family-names: "Kreckel" + given-names: "Kathryn" +- family-names: "Kruijssen" + given-names: "Diederik" +- family-names: "Larson" + given-names: "Kirsten" +- family-names: "Lee" + given-names: "Janice" +- family-names: "Mayker" + given-names: "Ness" +- family-names: "McElroy" + given-names: "Rebecca" +- family-names: "Meidt" + given-names: "Sharon" +- family-names: "Mok" + given-names: "Angus" +- family-names: "Pan" + given-names: "Hsi-An" +- family-names: "Puschnig" + given-names: "Johannes" +- family-names: "Razza" + given-names: "Alessandro" +- family-names: "Sanchez-Blazquez" + given-names: "Patricia" +- family-names: "Sandstrom" + given-names: "Karin" +- family-names: "Santoro" + given-names: "Francesco" +- family-names: "Sardone" + given-names: "Amy" +- family-names: "Scheuermann" + given-names: "Fabian" +- family-names: "Sun" + given-names: "Jiayi" +- family-names: "Thilker" + given-names: "David" +- family-names: "Turner" + given-names: "Jordan" +- family-names: "Ubeda" + given-names: "Leonardo" +- 
family-names: "Utomo" + given-names: "Dyas" +- family-names: "Watkins" + given-names: "Elizabeth" +- family-names: "Williams" + given-names: "Thomas" +version: 4.0.0 +doi: "10.3847/1538-4365/abec80" +url: "https://github.com/phangsTeam/phangs_imaging_scripts/" diff --git a/CITATION.md b/CITATION.md deleted file mode 100644 index 1695ff4e..00000000 --- a/CITATION.md +++ /dev/null @@ -1,23 +0,0 @@ -If you use these programs, we request that you acknowledge their use by citing the PHANGS-ALMA data reduction pipeline paper, which presents the software - -ADS link: - -https://ui.adsabs.harvard.edu/abs/2021arXiv210407665L/abstract - -bibtex entr: - -@ARTICLE{2021arXiv210407665L, - author = {{Leroy}, Adam K. and {Hughes}, Annie and {Liu}, Daizhong and {Pety}, Jerome and {Rosolowsky}, Erik and {Saito}, Toshiki and {Schinnerer}, Eva and {Schruba}, Andreas and {Usero}, Antonio and {Faesi}, Christopher M. and {Herrera}, Cinthya N. and {Chevance}, Melanie and {Hygate}, Alexander P.~S. and {Kepley}, Amanda A. and {Koch}, Eric W. and {Querejeta}, Miguel and {Sliwa}, Kazimierz and {Will}, David and {Wilson}, Christine D. and {Anand}, Gagandeep S. and {Barnes}, Ashley and {Belfiore}, Francesco and {Beslic}, Ivana and {Bigiel}, Frank and {Blanc}, Guillermo A. and {Bolatto}, Alberto D. and {Boquien}, Mederic and {Cao}, Yixian and {Chandar}, Rupali and {Chastenet}, Jeremy and {Chiang}, I-Da and {Congiu}, Enrico and {Dale}, Daniel A. and {Deger}, Sinan and {den Brok}, Jakob S. and {Eibensteiner}, Cosima and {Emsellem}, Eric and {Garc{\i}a-Rodr{\i}guez}, Axel and {Glover}, Simon C.~O. and {Grasha}, Kathryn and {Groves}, Brent and {Henshaw}, Jonathan D. and {Jimenez Donaire}, Maria J. and {Kim}, Jenny J. and {Klessen}, Ralf S. and {Kreckel}, Kathryn and {Kruijssen}, J.~M. Diederik and {Larson}, Kirsten L. and {Lee}, Janice C. and {Mayker}, Ness and {McElroy}, Rebecca and {Meidt}, Sharon E. 
and {Mok}, Angus and {Pan}, Hsi-An and {Puschnig}, Johannes and {Razza}, Alessandro and {Sanchez-Blazquez}, Patricia and {Sandstrom}, Karin M. and {Santoro}, Francesco and {Sardone}, Amy and {Scheuermann}, Fabian and {Sun}, Jiayi and {Thilker}, David A. and {Turner}, Jordan A. and {Ubeda}, Leonardo and {Utomo}, Dyas and {Watkins}, Elizabeth J. and {Williams}, Thomas G.}, - title = "{PHANGS-ALMA Data Processing and Pipeline}", - journal = {arXiv e-prints}, - keywords = {Astrophysics - Instrumentation and Methods for Astrophysics, Astrophysics - Astrophysics of Galaxies}, - year = 2021, - month = apr, - eid = {arXiv:2104.07665}, - pages = {arXiv:2104.07665}, -archivePrefix = {arXiv}, - eprint = {2104.07665}, - primaryClass = {astro-ph.IM}, - adsurl = {https://ui.adsabs.harvard.edu/abs/2021arXiv210407665L}, - adsnote = {Provided by the SAO/NASA Astrophysics Data System} -} diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..99a7fd6b --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,2 @@ +# Specify a default Code Owner for all files with a wildcard: +* @thomaswilliamsastro \ No newline at end of file diff --git a/LICENSE.rst b/LICENSE similarity index 100% rename from LICENSE.rst rename to LICENSE diff --git a/README.md b/README.md new file mode 100644 index 00000000..5aedbe92 --- /dev/null +++ b/README.md @@ -0,0 +1,152 @@ +# The PHANGS-ALMA Pipeline + +## Preface + +### Contents + +This is the [PHANGS](https://sites.google.com/view/phangs/home) post-processing and science-ready data product pipeline. +This pipeline processes data from calibrated visibilities to science-ready spectral cubes and maps. +The procedures and background for key parts of the pipeline are discussed in the Astrophysical Journal Supplements +Paper [PHANGS-ALMA Data Processing and Pipeline](https://ui.adsabs.harvard.edu/abs/2021ApJS..255...19L/abstract). +Please consult that paper for more background and details. 
+ +### What this pipeline is for + +This pipeline is devised to process data from radio interferometer observations (from, e.g., ALMA or VLA). +It is applied to calibrated visibilities, such as those generated by the CASA software, and delivers science-ready spectral +cubes and moment maps, along with associated uncertainty maps. In this regard, the PHANGS-ALMA pipeline offers a +flexible alternative to the `scriptForImaging` script distributed by ALMA. +A detailed list of the derived data products can be found in Section 7 of the paper mentioned above. The pipeline can +also process Total Power data from ALMA. + +### Pipeline and configuration files + +This repository contains the scripts that comprise the PHANGS-ALMA pipeline. +Configuration files for a large set of PHANGS projects, including the live version of the files for the +PHANGS-ALMA CO survey, exist in a [separate repository](https://github.com/PhangsTeam/phangs_pipeline_configs). +We include a frozen set of files that can be used to reduce PHANGS-ALMA as examples here. +If you need access to those other repositories or need examples, please request access as needed. + +### Contact + +For issues, the preferred method is to open an issue on the +[GitHub issues page](https://github.com/phangsTeam/phangs_imaging_scripts/issues). + +## Installation + +We recommend installing the pipeline in a separate [Conda](https://www.anaconda.com/) environment. + +The pipeline works in Python>=3.12, CASA>=6.7.3, and is pip installable: + +```bash +pip install git+https://github.com/phangsTeam/phangs_imaging_scripts.git +``` + +Or, if using a local installation: + +```bash +cd /path/to/phangs_imaging_scripts +pip install -e . +``` + +If you are using a monolithic CASA installation, you can run this within +the CASA shell. You may need to add pip to PATH to get CASA to install Astropy. 
+For that, you can see details [here](https://casadocs.readthedocs.io/en/stable/notebooks/frequently-asked-questions.html#Astropy-in-monolithic-CASA). +Note that if you are running within monolithic CASA and want +to make use of the single dish imaging capabilities, you will need to run +a pipeline version. + +By default, the PHANGS-ALMA pipeline will not install CASA-related packages, and so will +by default not be able to image data if running through pure Python. If running +inside monolithic CASA, these packages will already exist. If you want CASA +capabilities, then install the optional `casa` dependencies: + +```bash +pip install "phangsPipeline[casa] @ git+https://github.com/phangsTeam/phangs_imaging_scripts.git" +``` + +For local installations: +```bash +cd /path/to/phangs_imaging_scripts +pip install -e '.[casa]' +``` + +To check this has installed, in Python you can then import the pipeline: +```python +import phangsPipeline as ppl +``` + +You will also need to download [analysisUtils](https://doi.org/10.5281/zenodo.7502159). Make sure to grab the latest version, and append the location of +these scripts in your PATH. + +On the first run, you may get an error about downloading CASA data. In this case, ensure the directory it lists exists +and rerun. You can change this data path by editing config.py in ~/.casa. + +We maintain an older release of the pipeline [here](https://github.com/PhangsTeam/phangs_imaging_scripts/tree/v3.2). +This is somewhat more agnostic to CASA versions, but is unlikely to work with the latest CASA releases going forwards. + +## Running the pipeline + +There are two ways that this pipeline might be useful. First, it provides an end-to-end path to process calibrated +ALMA data (or VLA data) of the sort produced by the scriptForPI script distributed by ALMA into spectral cubes and maps. +That end-to-end approach is described in "Workflow for most users."
Second, the `phangsPipeline` directory contains a +number of modules for use inside and outside CASA that should have general utility. These are written without requiring +any broader awareness of the pipeline infrastructure and should just be generally useful. These are files named +`casaSOMENAME.py` and `scSOMEOTHERNAME.py` and, to a lesser extent, `utilsYETANOTHERNAME.py`. + +## Workflow for most users + +If you just want to *use* the pipeline then you will need to do three things: + +0. Run `scriptForPI.py` to apply the observatory-provided calibration to your data (this is outside the pipeline remit). + The pipeline picks up from there, it does not replace the ALMA observatory calibration and flagging pipeline. +1. Make configuration files ("key files") that describe your project. + Usually you can copy and modify an existing project to get a good start. We provide PHANGS-ALMA as an example. +2. Run the pipeline scripts + +**The Easiest Way** This release includes the full PHANGS-ALMA set of keys and the scripts we use to run the pipeline +for PHANGS-ALMA. These are *heavily documented* - copy them to make your own script and configuration and follow the +documentation in those scripts to get started. To be specific: + +- The PHANGS-ALMA keys to reduce the data end-to-end from the archive are in: `phangs-alma_keys/` +- The script to run the pipeline is: `run_pipeline_phangs-alma.py` + +These can run the actual PHANGS-ALMA reduction, though in practice we used slightly more complex versions of a few +programs to manage the workflow. Copying and modifying these are your best bet, especially following the patterns in +the key files. + +## A few details on procedure + +The full procedure is described in our ApJ Supplements paper and the programs themselves are all in this repository, +so we do not provide any extremely detailed docs here. Many individual routines are documented, though we also intend +to improve the documentation in the future. 
Therefore, we just note that broadly, the pipeline runs in four stages: + +1. **Staging** Stage and process uv-data. This step includes continuum subtraction, line extraction, and spectral + regridding. +2. **Imaging** Image and deconvolve the uv-data. This runs in several steps: dirty imaging, clean mask alignment, + multi-scale deconvolution, re-masking, and single convolution. +3. **Post-Process** Process deconvolved data into science-ready data cubes. This stage includes merging with the + Total Power and mosaicking. +4. **Derived Products** Convolution, noise estimation, masking, and calculation of science-ready data products. + +The simplest way to run these is to edit `run_pipeline_phangs-alma.py` to point at your key files, and run. + +## Chunked imaging + +For large cubes, it may be beneficial to farm out each cube slice to a different machine (within some HPC environment) +and work on them in serial. For this, the `ImagingChunkedHandler` exists. There are some example scripts on how to use +this in the `scripts` directory. + +## Contents of the pipeline in more detail + +**Architecture**: The pipeline is organized and run by a series of "handler" objects. These handlers organize the list +of targets, array configurations, spectral products, and derived moments and execute loops. + +The routines to process individual data sets are in individual modules, grouped by theme (e.g., casaImagingRoutines or +scNoiseRoutines). These routines do not know about the larger infrastructure of arrays, targets, etc. They generally +take an input file, output file, and various keyword arguments. + +A project is defined by a series of text key files in a "key_directory". These define the measurement set inputs, +configurations, spectral line products, moments, and derived products. + +**User Control**: For the most part the user's job is to *define the key files* and to run some scripts. 
diff --git a/README.rst b/README.rst deleted file mode 100644 index f7a6a856..00000000 --- a/README.rst +++ /dev/null @@ -1,125 +0,0 @@ -## README for PHANGS-ALMA Pipeline Version 2.0 - -### PREFACE - -**Contents:** This is "version 2" of the [PHANGS](https://sites.google.com/view/phangs/home) post-processing and science-ready data product pipeline. These programs use CASA, as well as the Python package astropy and other affiliated packages (analysisutils, spectral-cube, reproject), to process data from calibrated visibilities to science-ready spectral cubes and maps. The procedures and background for key parts of the pipeline are discussed in the Astrophysical Journal Supplements Paper "PHANGS-ALMA Data Processing and Pipeline" by Leroy, Hughes, Liu, Pety, Rosolowsky, Saito, Schinnerer, Usero, Faesi, Herrera et al. [LINK](https://ui.adsabs.harvard.edu). Please consult that paper for more background and details. - -**What this pipeline is for:** This pipeline is devised to process data from radio interferometer observations (from, e.g., ALMA or VLA). It is applied to calibrated visibilities, as those generated by the CASA software, and delivers science-ready spectral cubes and moment maps, along with associated uncertainty maps. In this regard, the PHANGS-ALMA pipeline offers a flexible alternative to the `scriptForImaging` script distributed by ALMA. A detailed list of the derived data products can be found in Section 7 of the paper mentioned above. - -**Pipeline and Configuration Files:** These are the programs to run the PHANGS-ALMA pipeline. Configuration files for a large set of PHANGS projects, including the live version of the files for the PHANGS-ALMA CO survey, exist in a separate repository. We include a frozen set of files that can be used to reduce PHANGS-ALMA as examples here. If you need access to those other repositories or need examples, please request access as needed. 
- -**Contact:** For issues, the preferred method is to open an issue on the github issues page. If you have specific other topics to discuss you should reach out to Adam Leroy, Erik Rosolowsky, or Daizhong Liu via email. But opening issues is better. - -**Earlier Versions:** If you are looking for Version 1.0 of the pipeline, you can access it by changing branches to "version1.0". Note that this will mostly be for historical reasons. We suggest using Version 2.0 moving forward. - -**Total Power:** The pipeline can feather interferometer data with complementary Total Power observations. However, as of Version 2.0 it cannot process Total Power data on its own and assumes that a Total Power cube has been produced by other means. For the PHANGS-ALMA data release, these Total Power cubes were produced with the scripts and pipeline described by Herrera et al. here: https://github.com/PhangsTeam/TP_ALMA_data_reduction/ . - -### REQUIREMENTS - -The pipeline runs in two separate software environments: - -* [CASA](https://casa.nrao.edu/) 5.6 or 5.7 (Staging, Imaging and Post-Processing) - * Not yet tested for CASA 6.x -* Python 3.6 or later (Derived products) with modern versions of several packages - * [numpy](https://numpy.org) - * [scipy](https://www.scipy.org) - * [astropy](https://www.astropy.org) - * [spectral-cube](https://spectral-cube.readthedocs.io/en/latest/) - -We recommend a standard [anaconda](https://www.anaconda.com/) distribution for python. - -### TWO WAYS TO USE THE PIPELINE - -There are two ways that this pipeline might be useful. First, it provides an end-to-end path to process calibrated ALMA data (or VLA data) of the sort produced by the scriptForPI script distributed by ALMA into spectral cubes and maps. That end-to-end approach is described in "Workflow for most users." Second, the `phangsPipeline` directory contains a number of modules for use inside and outside CASA that should have general utility. 
These are written without requiring any broader awareness of the pipeline infrastructure and should just be generally useful. These are files named `casaSOMENAME.py` and `scSOMEOTHERNAME.py` and, to a lesser extent, `utilsYETANOTHERNAME.py`. - -### WORKFLOW FOR MOST USERS - -If you just want to *use* the pipeline then you will need to do three things: - -( 0. Run `scriptForPI.py` to apply the observatory-provided calibration to your data. The pipeline picks up from there, it does not replace the ALMA observatory calibration and flagging pipeline. ) - -1. Make configuration files ("key files") that describe your project. Usually you can copy and modify an existing project to get a good start. We provide PHANGS-ALMA as an example. - -2. Put together two small scripts: one to run the CASA stuff and another to run the pure python stuff. In theory these could be combined or generalized, but we usually just write a few small programs. - -3. Run these scripts in order. The CASA stuff runs inside a CASA shell - the pipeline seems to work up through CASA 5.7 and has been heavily used in 5.4 and 5.6, In theory it should be workable in CASA 6.1+ but this isn't for sure yet. The pure python stuff expects a distribution with numpy, astropy, spectral-cube, and scipy and python 3.6+ or so. - -**The Easiest Way** This release includes the full PHANGS-ALMA set of keys and the scripts we use to run the pipeline for PHANGS-ALMA. These are *heavily documented* - copy them to make your own script and configuration and follow the documented in those scripts to get started. 
To be specific: - -The PHANGS-ALMA keys to reduce the data end-to-end from the archive, along with heavy documentation are in: `phangs-alma_keys/` - -The script to run the CASA part of the pipeline is: `run_casa_pipeline_phangs-alma.py` - -The python (v3.x) script to create derived products is: `run_derived_pipeline_phangs-alma.py` - -These can run the actual PHANGS-ALMA reduction, though in practice we used slightly more complex versions of a few programs to manage the workflow. Copying and modifying these is your best bet, especially following the patterns in the key files. - -### A FEW DETAILS ON PROCEDURE - -The full procedure is described in our ApJ Supplements paper and the programs themselves are all in this repository, so we do not provide any extremely detailed docs here. Many individual routines are documented, though we also intend to improve the documentation in the future. Therefore we just note that broadly, the pipeline runs in four stages: - -1. **Staging (in CASA)** Stage and process uv-data. This step includes continuum subtraction, line extraction, and spectral regridding. - -2. **Imaging (in CASA)** Image and deconvolve the uv-data. This runs in several steps: dirty imaging, clean mask alignment, multi-scale deconvolution, re-masking, and single convolution. - -3. **Post-Process (in CASA)** Process deconvolved data into science-ready data cubes. This stage includes merging with the total power and mosaicking. - -4. **Derived Products (in python)** Convolution, noise estimation, masking, and calculation of science-ready data products. - -The simplest way to run these is to write two small scripts and do the following: - -1. Initialize CASA -2. Run a script that initializes a `keyHandler` object pointed at your key directory (see below). Then use this keyHandler to initialize handler objects for uv data, imaging, and postprocessing. 
Optionally restrict those objects of interest for each handler to a subset of targets, array configurations, or lines. -3. Inside the same script, run the main loop commands for each handler object. - -Then exit CASA and - -4. Initialize a python environment with scipy, numpy, astropy, and spectral-cube installed. -5. Run a script that initializes a `keyHandler` again pointed at your key directory, then use this keyHandler to initialize a derived product handler. -6. Run the main loop for the derived product handler. - -These two scripts are the ones listed above. They are heavily annotated and should provide a good starting point. - -### SINGLE DISH PROCESSING (still in development) - -The pipeline also includes scripts to execute the single dish processing described by Herrera et al. (2020). These capabilities are still somewhat in development. - -To use this capability: - -1. The measurement set key (e.g., ms_file_key) should include entries - labeled "tp" or similar. These "tp" entries in the measurements set - should refer to the root directory for the delivered data set - (i.e., up one level from calibrated/ scripts/ etc.). - -2. The configuration key (e.g., config_definitions) should include a - "singledish_config" named, e.g., "tp" or similar to be associated - with the measurement set entries. Associated keywords are - 'bl_order' and 'chan_dv_kms'. - -3. Output singledish file names need to be defined in the - singledish_key file. These are also later used to feather or sdint - image the interferometric data. - -4. Then there is a script - -### CONTENTS OF THE PIPELINE IN MORE DETAIL - -**Architecture**: The pipeline is organized and run by a series of -"handler" objects. These handlers organize the list of targets, array -configurations, spectral products, and derived moments and execute -loops. - -The routines to process individual data sets are in individual -modules, grouped by theme (e.g., casaImagingRoutines or -scNoiseRoutines). 
These routines do not know about the larger -infrastructure of arrays, targets, etc. They generally take an input -file, output file, and various keyword arguments. - -A project is defined by a series of text key files in a -"key_directory". These define the measurement set inputs, -configurations, spectral line products, moments, and derived -products. - -**User Control**: For the most part the user's job is to *define the -key files* and to run some scripts. - diff --git a/phangsPipeline/PHANGSPipelineREADME.md b/phangsPipeline/PHANGSPipelineREADME.md deleted file mode 100644 index d38c404c..00000000 --- a/phangsPipeline/PHANGSPipelineREADME.md +++ /dev/null @@ -1,118 +0,0 @@ -## README for the PHANGS Pipeline - -This README specifically describes the PHANGS CASA and python -pipeline. - -Contact: Email leroy.42@osu.edu or otherwise get in touch with -questions or suggestions. - -### REQUIRED SETUP - -You need the analysisUtils to use the ppieline. I import these -automatically by adding the following lines to my `~/.casa/init.py`: - -``` -sys.path.append("/home/maury/leroy.42/casapy/analysis_scripts/") -import analysisUtils as au -``` - -### OVERVIEW OF DATA AND DIRECTORY MODEL - -The pipeline assumes the following structure: - -working directory/ -├── scripts/ -│ ├── stage_imaging.py -│ ├── image_data.py -│ ├── process_cubes.py -│ ├── phangsPipeline/ -│ └── data_files/ -├── target1/ -├── target2/ -├── target3/ -├── ... -├── targetn/ -├── release/ -│ └── vX/ -│ ├── raw/ -│ ├── process/ -│ ├── products/ -│ └── delivery/ -└── singledish*/ - -* Optional - the single data can in theory live anywhere. - -This is not exhaustive, but gives the idea. Key points: - -* All operations assume a relative structure with scripts/ parallel to - the directories for individual targets or groups of targets. - -* The pipeline code, which the user does not have to modify, lives in - a subdirectory of scripts/ called phangsPipeline/ . 
- -* The data files that define targets, uv data, single dish data, - etc. for individual projects live in a subdirectory of scripts/ - called called data_files/ . A separate set of data_files can be used - to create a separate application of the pipeline for different - projects, e.g., the PHANGS-ALMA CO 2-1 surveys use one set of files, - while the dense gas mapping uses another set of files. - -* Each source ("target") has a working directory in parallel to the - scripts/ directory. The pipeline changes to that directory and works - there when working on that target. Sometimes multiple targets are - grouped together in a single directory. For example, this happens if - a single galaxy gets observed across multiple independent - mosaics. This is controlled by a data file called dir_key.txt and - entirely up to the user. - -* The user manipulates stage_imaging, image_data, etc. in the scripts/ - to run the staging, imaging, etc.. This is the main user interface - to the pipeline. - -* The process_cubes and following scripts (some still IDL) copy the - imaged data to the release/vX/ (where X is the release name) - directory and then post-process the imaging in these directories. - -* A delivery for distribution is built into the release/vX/delivery/ - directory. - -### COMMENTARY ON THE ASSUMED DATA MODEL - -The pipeline is currently built to operate on *one line at a time*, -because it assumes the ability to map between velocity and -frequency. It can operate on many different lines in serial, e.g., -CO2-1 then 13CO2-1 then C18O2-1, with the lines defined in 'line_list' -with frequencies taken from splatalogue. - -The pipeline makes some assumptions that are worth spelling out: - -* Right now a single measurement set is assumed to contain a single - target. The properties of each target are defined in a data - file. - - This could be modified by adding a field selection to the - "ms_file_key", but we do not currently have any data that require - it. 
- -* Right now the pipeline distinguishes between the 12m array and the - 7m array, but doesn't distinguish beyond that. Individual - measurement sets get the label, e.g., 7m_1, 7m_2, etc.. These are - grouped and processed together. - - Starting in Cycle 8, we will beging to combine multiple 12m - configurations and we should revise the formalism a little. It will - make sense to assign each measurement set to a user defined array - (e.g., "12m_ext") and separate the labeling from the array code. - -### PIPELINE CONTENTS - -The pipeline is organized as a package called phangsPipeline. Within -that there are the following modules. - -* keyHandler : this handles the data files that describe the location - of uv data, properties of targets, and more or less all the other - user-defined input to the pipeline. A KeyHandler can be intiated - pointing at different files to run the pipeline for different - projects. - - diff --git a/phangsPipeline/__init__.py b/phangsPipeline/__init__.py index bb3bb020..5ceeb0e1 100644 --- a/phangsPipeline/__init__.py +++ b/phangsPipeline/__init__.py @@ -1,36 +1,47 @@ -# Licensed under a MIT license - see LICENSE.rst +from importlib.metadata import PackageNotFoundError, version -# Packages may add whatever they like to this file, but -# should keep this content at the top. 
-# ---------------------------------------------------------------------------- -from ._astropy_init import * # noqa -# ---------------------------------------------------------------------------- +# Ensure CASA is installed +from .check_imports import is_casa_installed, is_spectral_cube_installed -from .casa_check import is_casa_installed casa_enabled = is_casa_installed() +spectral_cube_enabled = is_spectral_cube_installed() + +try: + __version__ = version(__name__) +except PackageNotFoundError: + __version__ = "dev" -from .phangsLogger import setup_logger -from .handlerKeys import KeyHandler -from .handlerSingleDish import SingleDishHandler from .handlerAlmaDownload import AlmaDownloadHandler -from .handlerVis import VisHandler -from .handlerPostprocess import PostProcessHandler from .handlerDerived import DerivedHandler +from .handlerKeys import KeyHandler from .handlerRelease import ReleaseHandler +from .phangsLogger import setup_logger + +__all__ = [ + "AlmaDownloadHandler", + "KeyHandler", + "ReleaseHandler", + "setup_logger", +] +# Modules that require CASA to be installed if casa_enabled: + from .handlerImagingChunked import ImagingChunkedHandler from .handlerImaging import ImagingHandler + from .handlerPostprocess import PostProcessHandler + from .handlerSingleDish import SingleDishHandler from .handlerTestImaging import TestImagingHandler + from .handlerVis import VisHandler -__all__ = ["setup_logger", "KeyHandler", "SingleDishHandler", "VisHandler", "PostProcessHandler", "DerivedHandler", - "ReleaseHandler"] + __all__.extend([ + "ImagingChunkedHandler", + "ImagingHandler", + "PostProcessHandler", + "SingleDishHandler", + "TestImagingHandler", + "VisHandler", + ]) -if casa_enabled: - __all__.append("ImagingHandler") - __all__.append("TestImagingHandler") - -try: - from .handlerAlmaDownload import AlmaDownloadHandler - __all__.append("AlmaDownloadHandler") -except ImportError: - pass +# Modules that require spectral-cube to be installed +if 
spectral_cube_enabled: + __all__.extend(["DerivedHandler"]) diff --git a/phangsPipeline/_astropy_init.py b/phangsPipeline/_astropy_init.py deleted file mode 100644 index 43511d9c..00000000 --- a/phangsPipeline/_astropy_init.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under a 3-clause BSD style license - see LICENSE.rst -import os - -__all__ = ['__version__'] - -try: - from .version import version as __version__ -except ImportError: - __version__ = '' - -# Create the test function for self test -try: - __all__ += ['test'] - from astropy.tests.runner import TestRunner - test = TestRunner.make_test_runner_in(os.path.dirname(__file__)) -except ImportError: - test = None diff --git a/phangsPipeline/casaCubeRoutines.py b/phangsPipeline/casaCubeRoutines.py index 427b0bf5..ae3cdecb 100644 --- a/phangsPipeline/casaCubeRoutines.py +++ b/phangsPipeline/casaCubeRoutines.py @@ -4,37 +4,21 @@ but also may be of general utility. """ -#region Imports and definitions - -import os -import glob import logging +import os +import analysisUtils as au import numpy as np import scipy.ndimage as nd -try: - import pyfits # CASA has pyfits, not astropy -except ImportError: - import astropy.io.fits as pyfits - -# Analysis utilities -import analysisUtils as au - -# Pipeline versioning -from .pipelineVersion import version as pipeVer +from astropy.io import fits -# CASA stuff from . import casaStuff +from . 
import __version__ # Logging -#from .pipelineLogger import PipelineLogger -#logger = PipelineLogger(__name__) - logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -#endregion - #region Check getchunk putchunk memory issue def check_getchunk_putchunk_memory_issue( @@ -151,7 +135,7 @@ def copy_dropdeg( return(False) os.system('rm -rf '+temp_outfile) - importfits(fitsimage=infile, + casaStuff.importfits(fitsimage=infile, imagename=temp_outfile, zeroblanks=False, overwrite=overwrite) @@ -189,7 +173,7 @@ def get_mask(infile, huge_cube_workaround=True): # casaStuff.exportfits(imagename=infile + '.temp_mask', # fitsimage=infile + '.temp.fits', # stokeslast=False, overwrite=True) - # hdu = pyfits.open(infile + '.temp.fits')[0] + # hdu = fits.open(infile + '.temp.fits')[0] # mask = hdu.data.T[:, :, 0, :] # # os.system('rm -rf ' + infile + '.temp_deg_ordered') @@ -347,7 +331,7 @@ def multiply_cube_by_value(infile, value, brightness_unit, huge_cube_workaround= # casaStuff.exportfits(imagename=infile, # fitsimage=infile + '.fits', # overwrite=True) - # hdu = pyfits.open(infile + '.fits')[0] + # hdu = fits.open(infile + '.fits')[0] # hdu.data *= value # # hdu.writeto(infile + '.fits', overwrite=True) @@ -453,7 +437,7 @@ def export_and_cleanup( # Clean up headers - hdu = pyfits.open(outfile) + hdu = fits.open(outfile) hdr = hdu[0].header data = hdu[0].data @@ -513,13 +497,9 @@ def export_and_cleanup( logger.info("... 
fractional deviation: "+str(frac_dev)) # Never forget where you came from - hdr['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + pipeVer + hdr['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + __version__ - # Overwrite - try: - hdu.writeto(outfile, clobber=True) - except TypeError: - hdu.writeto(outfile, overwrite=True) + hdu.writeto(outfile, overwrite=True) return() diff --git a/phangsPipeline/casaFeatherRoutines.py b/phangsPipeline/casaFeatherRoutines.py index 114d5041..5c03846a 100644 --- a/phangsPipeline/casaFeatherRoutines.py +++ b/phangsPipeline/casaFeatherRoutines.py @@ -3,39 +3,19 @@ combination using CASA's feather. """ -#region Imports and definitions - -import os -import glob import logging +import os -import numpy as np -try: - import pyfits # CASA has pyfits, not astropy -except ImportError: - import astropy.io.fits as pyfits - -# Analysis utilities import analysisUtils as au +import numpy as np -# Pipeline versionining -from .pipelineVersion import version as pipeVer - -# CASA stuff -from . import casaStuff - -# Other pipeline stuff from . import casaCubeRoutines as ccr +from . import casaStuff # Logging -#from .pipelineLogger import PipelineLogger -#logger = PipelineLogger(__name__) - logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -#endregion - #region Feathering and single dish routines def prep_sd_for_feather( diff --git a/phangsPipeline/casaImagingRoutines.py b/phangsPipeline/casaImagingRoutines.py index 63dda8a1..c73dda57 100644 --- a/phangsPipeline/casaImagingRoutines.py +++ b/phangsPipeline/casaImagingRoutines.py @@ -2,30 +2,16 @@ Standalone routines related to CASA imaging. 
""" -# region Imports and definitions - -import os -import glob, copy, inspect +import copy +import inspect import logging +import os -import numpy as np - -try: - import pyfits # CASA has pyfits, not astropy -except ImportError: - import astropy.io.fits as pyfits - -# Analysis utilities import analysisUtils as au +import numpy as np -# Pipeline versionining -from .pipelineVersion import version as pipeVer - -# CASA stuff -from . import casaStuff from . import casaMaskingRoutines as cmr - -# Clean call object +from . import casaStuff from .clean_call import CleanCall logger = logging.getLogger(__name__) diff --git a/phangsPipeline/casaLegacySingleDishRoutines.py b/phangsPipeline/casaLegacySingleDishRoutines.py index f887175b..6d93c5b2 100644 --- a/phangsPipeline/casaLegacySingleDishRoutines.py +++ b/phangsPipeline/casaLegacySingleDishRoutines.py @@ -33,70 +33,28 @@ - 2021-07-05 can not split with ant='0&0' ? if no split, can not obtain a reasonable final fits image cube?! """ - -# python2 to python3: print, sort - -# Note that some sd* commands are deleted since CASA 5. -# see https://casa.nrao.edu/casadocs/casa-5.0.0/introduction/release-notes-50 -# The following single dish tasks are renamed (name in CASA 4.7 -> 5.0). Note all tasks with 'old' -# at the end of the name will be deleted in future releases. -# tsdbaseline -> sdbaseline -# tsdcal -> sdcal -# tsdfit -> sdfit -# tsdsmooth -> sdsmooth -# sdaverage -> sdaverageold -# sdbaseline -> sdbaselineold -# sdbaseline2 -> sdbaseline2old -# sdcal -> sdcalold -# sdcal2 -> sdcal2old -# sdcoadd -> sdcoaddold -# sdfit -> sdfitold -# sdflag -> sdflagold -# sdflagmanager -> sdflagmanager -# sdgrid -> sdgridold -# sdlist -> sdlistold -# sdmath -> sdmathold -# sdplot -> sdplotold -# sdreduce -> sdreduceold -# sdsave -> sdsaveold -# sdscale -> sdscaleold -# sddstat -> sdstatold - -# ASAP data format will also be disabled since CASA 5. 
-# see https://casa.nrao.edu/casadocs/casa-5.4.1/single-dish-calibration/future-development-goals-for-casa-single-dish -# Use plotms to replace sdplot, -# see https://casa.nrao.edu/docs/cookbook/casa_cookbook009.html -# TODO - -#region Imports and definitions - -import os, sys, re, shutil, inspect, copy, time, datetime, json, ast -import numpy as np -from scipy.ndimage import label +import ast +import copy +import datetime import glob -import tarfile - +import json import logging -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) +import os +import re +import shutil +import tarfile +import time -# Analysis utilities import analysisUtils as au -es = au.stuffForScienceDataReduction() - -from .utilsSingleDish import getTPSampling, get_first_arr_val -#from analysisUtils import getTPSampling +import numpy as np -# CASA stuff from . import casaStuff +from .utilsSingleDish import getTPSampling, get_first_arr_val -# Spectral lines -from . import utilsLines as lines - -# Pipeline versionining -from .pipelineVersion import version as pipeVer +es = au.stuffForScienceDataReduction() -#endregion +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) #region Routines for basic characterization @@ -115,14 +73,7 @@ path_raw = '../raw/' # Path to the raw folder. path_dataproduct = '../data/' # Path to data products. - -# precasa5 -if hasattr(casaStuff, 'sdsave'): - precasa5 = True - fsuffix = '.asap' -else: - precasa5 = False - fsuffix = '.ms' +fsuffix = '.ms' # Check if data was calibrated with the pipeline def checkpipeline(): @@ -270,45 +221,24 @@ def scaleAutocorr(vis, scale=1., antenna='', spw='', field='', scan=''): mymsmd.close() - if precasa5: - datacolumn = getDataColumnName(vis) - - logger.info("Multiplying %s to the dataset %s column %s." % (str(scale), vis, datacolumn)) - logger.info("The selection criteria are '%s'." 
% (" && ".join(conditions))) - - mytb.open(vis, nomodify=False) - subtb = mytb.query(" && ".join(conditions)) + logger.info("Opening the table "+vis) + mytb.open(vis, nomodify=False) + subtb = mytb.query(" && ".join(conditions)) + datacolumns = [] + for datacolumn in subtb.colnames(): + if datacolumn in ['DATA','FLOAT_DATA','MODEL_DATA','CORRECTED_DATA']: + datacolumns.append(datacolumn) + for datacolumn in datacolumns: try: data = subtb.getcol(datacolumn) logger.info("Dimension of the selected data: %s" % str(data.shape)) subtb.putcol(datacolumn, data*scale) except: - logger.info("An error occurred upon reading/writing the data.") - finally: - logger.info("Closing the table.") - mytb.flush() - subtb.close() - mytb.close() - else: - - logger.info("Opening the table "+vis) - mytb.open(vis, nomodify=False) - subtb = mytb.query(" && ".join(conditions)) - datacolumns = [] - for datacolumn in subtb.colnames(): - if datacolumn in ['DATA','FLOAT_DATA','MODEL_DATA','CORRECTED_DATA']: - datacolumns.append(datacolumn) - for datacolumn in datacolumns: - try: - data = subtb.getcol(datacolumn) - logger.info("Dimension of the selected data: %s" % str(data.shape)) - subtb.putcol(datacolumn, data*scale) - except: - logger.info("An error occurred upon reading/writing the data column "+datacolumn+"! The scaleAutocorr function may have failed!") - logger.info("Closing the table.") - mytb.flush() - subtb.close() - mytb.close() + logger.info("An error occurred upon reading/writing the data column "+datacolumn+"! The scaleAutocorr function may have failed!") + logger.info("Closing the table.") + mytb.flush() + subtb.close() + mytb.close() # Create vector with antenna names def read_ants_names(filename): @@ -795,7 +725,6 @@ def import_and_split_ant(filename, precycle7=True, doallants=True, dosplitants=T Args: filename (str): The data name for output with suffix ".ms". Does not include the file path, which should be defined in the global variable `path_raw`. 
precycle7 (bool): Whether the data is taken pre-Cycle7, i.e., Cycle 0-6. - precasa5 (bool): Whether using pre-CASA5 versions, i.e., CASA 3.X.X-4.X.X. doallants (bool): Whether making an MS data with all antennae in it. """ # Can we be more smart on defining the precycle7 variable? @@ -843,7 +772,7 @@ def import_and_split_ant(filename, precycle7=True, doallants=True, dosplitants=T process_caldevice=False, with_pointing_correction=True) - if precycle7 and precasa5: + if precycle7: # Transfer specific flags (BDF flags) from the ADSM to the MS file logger.info(os.environ['CASAPATH'].split()[0]+'/bin/bdflags2MS -f "COR DELA INT MIS SIG SYN TFB WVR ZER" '+filename0+' '+filename) @@ -902,10 +831,6 @@ def import_and_split_ant(filename, precycle7=True, doallants=True, dosplitants=T if doallants: cp_data_dir(filename, filename+'.allant'+fsuffix) - # if precasa5, always dosplitants - if precasa5: - dosplitants = True - # if dosplitants, make an MS for each antenna, with a file name like filename+'.'+ant+fsuffix if dosplitants: # 1.4 Split by antenna @@ -915,65 +840,49 @@ def import_and_split_ant(filename, precycle7=True, doallants=True, dosplitants=T vec_ants = [s for s in vec_ants_t if any(xs in s for xs in ['PM','DV'])] for ant in vec_ants: rm_data_dir(filename+'.'+ant+fsuffix) - if precasa5: - casaStuff.sdsave(infile = filename, - splitant = True, - outfile = filename+fsuffix, - overwrite = True) - # note that output file names will be filename+'.'+ant+fsuffix - - #1.5 sdlist - logger.info("1.5 Create sdlist for each splitted file.") - for ant in vec_ants: - if os.path.exists('obs_lists/'+filename+'.'+ant+fsuffix+'.sdlist'): - os.remove('obs_lists/'+filename+'.'+ant+fsuffix+'.sdlist') - casaStuff.sdlist(infile = filename+'.'+ant+fsuffix+'', - outfile = 'obs_lists/'+filename+'.'+ant+fsuffix+'.sdlist') - else: - - for ant in vec_ants: - use_casa_split_antenna = True - if use_casa_split_antenna: - logger.info('Running split to make '+filename+'.'+ant+fsuffix+', datacolumn is 
'+getDataColumnForSplit(filename)) - casaStuff.split(vis = filename, - outputvis = filename+'.'+ant+fsuffix, - antenna = '%s&&&'%(ant), - datacolumn = getDataColumnForSplit(filename)) - ## CASA split with antenna = '0&0' does not work, should use '0&&&' to get only autocorrelations, - # see https://casa.nrao.edu/docs/taskref/split-task.html - else: - ## these are not well tested - # this is an alternative way to split single antenna autocorr data - filename_in = filename - filename_out = filename+'.'+ant+fsuffix+'.tmp' - cp_data_dir(filename_in, filename_out) - # - other_ants = copy.copy(vec_ants) - other_ants.remove(ant) - str_other_ants = ';'.join(other_ants) - logger.info('Running flagdata to flag '+str_other_ants+' in '+filename_out) - casaStuff.flagdata(vis = filename_out, - mode = 'manual', - antenna = str_other_ants, - action = 'apply') - # - filename_in = filename+'.'+ant+fsuffix+'.tmp' - filename_out = filename+'.'+ant+fsuffix - rm_data_dir(filename_out) - logger.info('Running split to make '+filename_out+', datacolumn is '+getDataColumnForSplit(filename_in)) - casaStuff.split(vis = filename_in, - outputvis = filename_out, - keepflags = False, - datacolumn = getDataColumnForSplit(filename_in)) - - #1.5 sdlist - logger.info("1.5 Create listobs for each splitted file.") - for ant in vec_ants: - if os.path.exists('obs_lists/'+filename+'.'+ant+fsuffix+'.listobs.txt'): - os.remove('obs_lists/'+filename+'.'+ant+fsuffix+'.listobs.txt') - casaStuff.listobs(vis = filename+'.'+ant+fsuffix+'', - listfile = 'obs_lists/'+filename+'.'+ant+fsuffix+'.listobs.txt') + for ant in vec_ants: + use_casa_split_antenna = True + if use_casa_split_antenna: + logger.info('Running split to make '+filename+'.'+ant+fsuffix+', datacolumn is '+getDataColumnForSplit(filename)) + casaStuff.split(vis = filename, + outputvis = filename+'.'+ant+fsuffix, + antenna = '%s&&&'%(ant), + datacolumn = getDataColumnForSplit(filename)) + ## CASA split with antenna = '0&0' does not work, should use 
'0&&&' to get only autocorrelations, + # see https://casa.nrao.edu/docs/taskref/split-task.html + else: + ## these are not well tested + # this is an alternative way to split single antenna autocorr data + filename_in = filename + filename_out = filename+'.'+ant+fsuffix+'.tmp' + cp_data_dir(filename_in, filename_out) + # + other_ants = copy.copy(vec_ants) + other_ants.remove(ant) + str_other_ants = ';'.join(other_ants) + logger.info('Running flagdata to flag '+str_other_ants+' in '+filename_out) + casaStuff.flagdata(vis = filename_out, + mode = 'manual', + antenna = str_other_ants, + action = 'apply') + # + filename_in = filename+'.'+ant+fsuffix+'.tmp' + filename_out = filename+'.'+ant+fsuffix + rm_data_dir(filename_out) + logger.info('Running split to make '+filename_out+', datacolumn is '+getDataColumnForSplit(filename_in)) + casaStuff.split(vis = filename_in, + outputvis = filename_out, + keepflags = False, + datacolumn = getDataColumnForSplit(filename_in)) + + #1.5 sdlist + logger.info("1.5 Create listobs for each splitted file.") + for ant in vec_ants: + if os.path.exists('obs_lists/'+filename+'.'+ant+fsuffix+'.listobs.txt'): + os.remove('obs_lists/'+filename+'.'+ant+fsuffix+'.listobs.txt') + casaStuff.listobs(vis = filename+'.'+ant+fsuffix+'', + listfile = 'obs_lists/'+filename+'.'+ant+fsuffix+'.listobs.txt') with open('done_step_1_for_'+filename[0:-3], 'w') as outlogfile: @@ -1133,59 +1042,36 @@ def counts2kelvin(filename, ant_list=None, spws_info=None, spwmap=None, doplots= rm_data_dir(filename_out) - if precasa5: - - logger.info('Running sdcal2 to make '+filename_out) - casaStuff.sdcal2(infile = filename_in, - calmode = 'ps,tsys,apply', - spw = spws_all_str, - tsysspw = spws_tsys_str, - spwmap = spwmap, - outfile = filename_out, - overwrite = True) - - if doplots == True: - es.SDcheckSpectra(filename_out, spwIds=spws_scie_str, interactive=False) - - else: + cp_data_dir(filename_in, filename_out) - cp_data_dir(filename_in, filename_out) + 
logger.info('Running sdcal to make '+filename_out) + casaStuff.sdcal(infile = filename_out, + calmode = 'ps,tsys,apply', + spw = spws_all_str, + spwmap = spwmap, + outfile = filename_out, + overwrite = True, + ) + # -- https://casa.nrao.edu/casadocs/casa-5.4.1/single-dish-calibration/single-dish-data-calibration-and-reduction + # Note that we didn't specify the Tsys spectral windows in the call to sdcal. + # For ALMA single-dish data from Cycle 3 onward, this is okay since the Tsys + # and science data share the same spectral window. + # Alternatively, the mapping between the Tsys + # and science spectral windows can be explicitly set with spwmap and spw. + # In this case, we would use: + # sdcal(infile=vis, calmode='ps,tsys,apply', spwmap={17:[17], 19:[19], 21:[21],23:[23]}, spw='17,19,21,23') - logger.info('Running sdcal to make '+filename_out) - casaStuff.sdcal(infile = filename_out, - calmode = 'ps,tsys,apply', - spw = spws_all_str, - spwmap = spwmap, - outfile = filename_out, - overwrite = True, - ) - # -- https://casa.nrao.edu/casadocs/casa-5.4.1/single-dish-calibration/single-dish-data-calibration-and-reduction - # Note that we didn't specify the Tsys spectral windows in the call to sdcal. - # For ALMA single-dish data from Cycle 3 onward, this is okay since the Tsys - # and science data share the same spectral window. - # Alternatively, the mapping between the Tsys - # and science spectral windows can be explicitly set with spwmap and spw. 
- # In this case, we would use: - # sdcal(infile=vis, calmode='ps,tsys,apply', spwmap={17:[17], 19:[19], 21:[21],23:[23]}, spw='17,19,21,23') - - if doplots == True: - es.SDcheckSpectra(filename_out, msName=filename_out, spwIds=spws_scie_str, interactive=False) - # must use new analysisUtils.py with getCasaVersion() - # this will create plot files in directory filename_out+'.plots' - # note that these plots are uncalibrated + if doplots == True: + es.SDcheckSpectra(filename_out, msName=filename_out, spwIds=spws_scie_str, interactive=False) + # must use new analysisUtils.py with getCasaVersion() + # this will create plot files in directory filename_out+'.plots' + # note that these plots are uncalibrated apply_nl = check_date_nonlinearity(filename) if apply_nl == True: logger.info("3.2 Applying non-linearity correction factor if data were obtained before the 2015-10-01") - if precasa5: - casaStuff.sdscale(infile = filename_out, - outfile = filename_out, - factor = 1.25, - overwrite=True) - else: - #raise Exception('Data need pre-CASA-5 version for sdscale!') - pass ## this is for debug, uncomment this! 
+ logger.warning("This only works in pre-CASA 5!") # end for ant loop @@ -1263,24 +1149,17 @@ def extract_cube(filename, source, name_line, ant_list=None, freq_rest=None, spw os.remove(plotfile) if not os.path.exists(plotfile): logger.info("4.1 Plotting each spw") - if precasa5: - logger.info('Running sdplot to make '+plotfile) - casaStuff.sdplot(infile=filename_in, - plottype='spectra', specunit='channel', - timeaverage=True, stack='p', - outfile=plotfile) - else: - logger.info('Running plotms to make '+plotfile) - casaStuff.plotms(vis=filename_in, - ydatacolumn=getDataColumnForPlotMS(filename_in), - intent='OBSERVE_TARGET#ON_SOURCE', - field=source, spw=str(spw_line), - averagedata=True, avgtime='86400', avgscan=True, - xaxis='vel', yaxis='amp', coloraxis='ant1', showlegend=True, - iteraxis='corr', xselfscale=True, xsharedaxis=True, gridrows=2, - highres=True, dpi=300, showmajorgrid=True, majorstyle='dot', - plotfile=plotfile, overwrite=True, - ) + logger.info('Running plotms to make '+plotfile) + casaStuff.plotms(vis=filename_in, + ydatacolumn=getDataColumnForPlotMS(filename_in), + intent='OBSERVE_TARGET#ON_SOURCE', + field=source, spw=str(spw_line), + averagedata=True, avgtime='86400', avgscan=True, + xaxis='vel', yaxis='amp', coloraxis='ant1', showlegend=True, + iteraxis='corr', xselfscale=True, xsharedaxis=True, gridrows=2, + highres=True, dpi=300, showmajorgrid=True, majorstyle='dot', + plotfile=plotfile, overwrite=True, + ) # Get the string of the channels to be extracted from the original cube coords = read_source_coordinates(filename,source) @@ -1291,36 +1170,20 @@ def extract_cube(filename, source, name_line, ant_list=None, freq_rest=None, spw rm_data_dir(filename_out) - if precasa5: - logger.info('Running sdsave to make '+filename_out) - casaStuff.sdsave(infile=filename_in, - field=source, - spw=spw_extr, - outfile=filename_out) - - listfile = 'obs_list/'+filename_out+'.list' - if os.path.exists(listfile): - logger.info('Deleting '+listfile) - 
os.remove(listfile) - logger.info('Running sdlist to make '+listfile) - casaStuff.sdlist(infile=filename_out, - outfile=listfile) - - else: - logger.info('Running split to make '+filename_out+', datacolumn is '+getDataColumnForSplit(filename_in)) - casaStuff.split(vis=filename_in, - field=source, - spw=spw_extr, - outputvis=filename_out, - datacolumn=getDataColumnForSplit(filename_in)) - - listfile = 'obs_list/'+filename_out+'.listobs.txt' - if os.path.exists(listfile): - logger.info('Deleting '+listfile) - os.remove(listfile) - logger.info('Running listobs to make '+listfile) - casaStuff.listobs(vis=filename_out, - listfile=listfile) + logger.info('Running split to make '+filename_out+', datacolumn is '+getDataColumnForSplit(filename_in)) + casaStuff.split(vis=filename_in, + field=source, + spw=spw_extr, + outputvis=filename_out, + datacolumn=getDataColumnForSplit(filename_in)) + + listfile = 'obs_list/'+filename_out+'.listobs.txt' + if os.path.exists(listfile): + logger.info('Deleting '+listfile) + os.remove(listfile) + logger.info('Running listobs to make '+listfile) + casaStuff.listobs(vis=filename_out, + listfile=listfile) if doplots == True: logger.info("4.3 Plotting the line spectrum averaged in time") @@ -1331,26 +1194,17 @@ def extract_cube(filename, source, name_line, ant_list=None, freq_rest=None, spw plotfile = 'plots/'+filename_out+'.line.'+name_line2+'.spec.png' if os.path.exists(plotfile): os.remove(plotfile) - if precasa5: - logger.info('Running sdplot to make '+plotfile) - casaStuff.sdplot(infile=filename_out, - plottype='spectra', specunit='km/s', - restfreq=str(freq_rest)+'MHz', - timeaverage=True, stack='p', - polaverage=True, - outfile=plotfile) # no outfile? 
- else: - logger.info('Running plotms to make '+plotfile) - casaStuff.plotms(vis=filename_out, - ydatacolumn=getDataColumnForPlotMS(filename_out), - intent='OBSERVE_TARGET#ON_SOURCE', - restfreq=str(freq_rest)+'MHz', - averagedata=True, avgtime='86400', avgscan=True, - xaxis='vel', yaxis='amp', coloraxis='ant1', showlegend=True, - iteraxis='corr', xselfscale=True, xsharedaxis=True, gridrows=2, - highres=True, dpi=300, showmajorgrid=True, majorstyle='dot', - plotfile=plotfile, overwrite=True, - ) + logger.info('Running plotms to make '+plotfile) + casaStuff.plotms(vis=filename_out, + ydatacolumn=getDataColumnForPlotMS(filename_out), + intent='OBSERVE_TARGET#ON_SOURCE', + restfreq=str(freq_rest)+'MHz', + averagedata=True, avgtime='86400', avgscan=True, + xaxis='vel', yaxis='amp', coloraxis='ant1', showlegend=True, + iteraxis='corr', xselfscale=True, xsharedaxis=True, gridrows=2, + highres=True, dpi=300, showmajorgrid=True, majorstyle='dot', + plotfile=plotfile, overwrite=True, + ) # end for ant loop @@ -1444,28 +1298,17 @@ def baseline(filename, source, name_line, ant_list=None, freq_rest=None, spws_in plotfile = 'plots/'+filename_out+'_baseline_corrected.png' if os.path.exists(plotfile): os.remove(plotfile) - if precasa5: - logger.info('Running sdplot to make '+plotfile) - casaStuff.sdplot(infile=filename_out, - plottype='spectra', - specunit='km/s', - restfreq=str(freq_rest)+'MHz', - timeaverage=True, - stack='p', - outfile=plotfile, - polaverage=True) - else: - logger.info('Running plotms to make '+plotfile) - casaStuff.plotms(vis=filename_out, - ydatacolumn=getDataColumnForPlotMS(filename_out), - intent='OBSERVE_TARGET#ON_SOURCE', - restfreq=str(freq_rest)+'MHz', - averagedata=True, avgtime='86400', avgscan=True, - xaxis='vel', yaxis='amp', coloraxis='ant1', showlegend=True, - iteraxis='corr', xselfscale=True, xsharedaxis=True, gridrows=2, - highres=True, dpi=300, showmajorgrid=True, majorstyle='dot', - plotfile=plotfile, overwrite=True, - ) + 
logger.info('Running plotms to make '+plotfile) + casaStuff.plotms(vis=filename_out, + ydatacolumn=getDataColumnForPlotMS(filename_out), + intent='OBSERVE_TARGET#ON_SOURCE', + restfreq=str(freq_rest)+'MHz', + averagedata=True, avgtime='86400', avgscan=True, + xaxis='vel', yaxis='amp', coloraxis='ant1', showlegend=True, + iteraxis='corr', xselfscale=True, xsharedaxis=True, gridrows=2, + highres=True, dpi=300, showmajorgrid=True, majorstyle='dot', + plotfile=plotfile, overwrite=True, + ) os.system('mv *blparam.txt obs_lists/') @@ -1520,15 +1363,7 @@ def concat_ants(filename, name_line, ant_list=None, freq_rest=None, spws_info=No filename_in = filename+'.'+ant+fin filename_out = filename+'.'+ant+finout rm_data_dir(filename_out) - if precasa5: - # Converting from ASAP to MS - logger.info("6.1 Converting from ASAP to MS") - logger.info('Running sdsave to make '+filename_out) - casaStuff.sdsave(infile = filename_in, - outfile = filename_out, - outform='MS2') - else: - cp_data_dir(filename_in, filename_out) # they are all *.ms, just copy it over + cp_data_dir(filename_in, filename_out) # they are all *.ms, just copy it over lis_fils.append(filename_out) # Concatenation logger.info("6.2 Concatenating antennas") @@ -1553,10 +1388,7 @@ def concat_ants(filename, name_line, ant_list=None, freq_rest=None, spws_info=No logger.info('Running scaleAutocorr on '+filename+'.cal.jy'+'.'+name_line) for ant in jyperk.keys(): logger.info('ant: %s, spw_line: %s, jyperk[ant][spw_line][\'mean\']: %s'%(ant, spw_line, jyperk[ant][spw_line]['mean'])) - if precasa5: - scaleAutocorr(vis=filename+'.cal.jy'+'.'+name_line, scale=jyperk[ant][spw_line]['mean'], antenna=ant, spw=spw_line) # in asap spw number does not change after split? 
- else: - scaleAutocorr(vis=filename+'.cal.jy'+'.'+name_line, scale=jyperk[ant][spw_line]['mean'], antenna=ant, spw=0) # spw is always 0 + scaleAutocorr(vis=filename+'.cal.jy'+'.'+name_line, scale=jyperk[ant][spw_line]['mean'], antenna=ant, spw=0) # spw is always 0 # Rename line spw to spw=0 @@ -1569,14 +1401,9 @@ def concat_ants(filename, name_line, ant_list=None, freq_rest=None, spws_info=No finout = '.cal.jy'+'.'+name_line rm_data_dir(filename+finout) logger.info('Running split to make '+filename+finout+', datacolumn is '+getDataColumnForSplit(filename+fin)) - if precasa5: - casaStuff.split(vis=filename+fin, - outputvis=filename+finout, - datacolumn='all') - else: - casaStuff.split(vis=filename+fin, - outputvis=filename+finout, - datacolumn=getDataColumnForSplit(filename+fin)) + casaStuff.split(vis=filename+fin, + outputvis=filename+finout, + datacolumn=getDataColumnForSplit(filename+fin)) # listobs @@ -2006,8 +1833,7 @@ def run_ALMA_TP_tools( ) if not dosplitants: - if not precasa5: - vec_ants = None + vec_ants = None # Remove known problematic datasets if vec_ants is not None: diff --git a/phangsPipeline/casaMaskingRoutines.py b/phangsPipeline/casaMaskingRoutines.py index 4c4546d9..bad36a1f 100644 --- a/phangsPipeline/casaMaskingRoutines.py +++ b/phangsPipeline/casaMaskingRoutines.py @@ -3,41 +3,20 @@ mask manipulation steps in CASA. """ -# -# 20200210 dzliu: moved "stat_clean_cube()" to here, as it is required by "signal_mask()" -# 20200210 dzliu: changed "casa." to "casaStuff.", as "casa" is a dict used by CASA itself. 
-# 20200210 dzliu: changed "print +(.*)$" to "logger.info(\1)" -# - -# region Imports and definitions - -import os -import glob import logging +import os +import analysisUtils as au import numpy as np import scipy.ndimage as ndimage +from astropy.io import fits from scipy.special import erfc -try: - import pyfits # CASA has pyfits, not astropy -except ImportError: - import astropy.io.fits as pyfits - -# Analysis utilities -import analysisUtils as au - -# Pipeline versionining -from .pipelineVersion import version as pipeVer - -# CASA stuff from . import casaStuff logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# endregion - # region Noise estimation def mad( @@ -237,7 +216,7 @@ def read_cube(infile, huge_cube_workaround=True): casaStuff.exportfits(imagename=infile, fitsimage=infile + '.fits', stokeslast=False, overwrite=True) - hdu = pyfits.open(infile + '.fits')[0] + hdu = fits.open(infile + '.fits')[0] cube = hdu.data.T # Remove intermediate fits file @@ -263,7 +242,7 @@ def write_mask(infile, outfile, mask, huge_cube_workaround=True): casaStuff.exportfits(imagename=outfile, fitsimage=outfile + '.fits', stokeslast=False, overwrite=True) - hdu = pyfits.open(outfile + '.fits')[0] + hdu = fits.open(outfile + '.fits')[0] hdu.data = mask.T hdu.header['BITPIX'] = -32 @@ -275,11 +254,7 @@ def write_mask(infile, outfile, mask, huge_cube_workaround=True): for wcs_name in wcs_names: hdu.header[wcs_name.upper()] = header[wcs_name] - # Variations between pyfits and astropy - try: - hdu.writeto(outfile + '.fits', clobber=True) - except TypeError: - hdu.writeto(outfile + '.fits', overwrite=True) + hdu.writeto(outfile + '.fits', overwrite=True) casaStuff.importfits(fitsimage=outfile + '.fits', imagename=outfile, diff --git a/phangsPipeline/casaMosaicRoutines.py b/phangsPipeline/casaMosaicRoutines.py index a9d4742a..739ad1c5 100644 --- a/phangsPipeline/casaMosaicRoutines.py +++ b/phangsPipeline/casaMosaicRoutines.py @@ -2,36 +2,20 @@ Standalone routines 
related to linear mosaicking of multi-part mosaics in CASA. """ -#region Imports and definitions import copy -import os -import glob import logging +import os -import numpy as np -try: - import pyfits # CASA has pyfits, not astropy -except ImportError: - import astropy.io.fits as pyfits - -# Analysis utilities import analysisUtils as au +import numpy as np -# Pipeline versionining -from .pipelineVersion import version as pipeVer - -# CASA stuff -from . import casaStuff - -# Other pipeline stuff from . import casaMaskingRoutines as cma +from . import casaStuff logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -#endregion - #region Routines to match resolution def common_res_for_mosaic( diff --git a/phangsPipeline/casaRoutineTests.py b/phangsPipeline/casaRoutineTests.py index 1b95cae9..cfdecb78 100644 --- a/phangsPipeline/casaRoutineTests.py +++ b/phangsPipeline/casaRoutineTests.py @@ -6,36 +6,16 @@ directory. """ -#region Imports and definitions - -import os -import glob import logging -import numpy as np -from scipy.special import erfc -import pyfits # CASA has pyfits, not astropy - -# Analysis utilities import analysisUtils as au +import numpy as np -# Pipeline versionining -from .pipelineVersion import version as pipeVer - -# CASA stuff -from . import casaStuff - -# Pipeline CASA routines -from . import casaCubeRoutines as ccr from . import casaMaskingRoutines as cma -from . import casaMosaicRoutines as cmr -from . 
import casaFeatherRoutines as cfr logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -#endregion - def test_estimate_noise( ): """ diff --git a/phangsPipeline/casaSingleDishALMAWrapper.py b/phangsPipeline/casaSingleDishALMAWrapper.py index a7ee65ee..d7dc7e36 100644 --- a/phangsPipeline/casaSingleDishALMAWrapper.py +++ b/phangsPipeline/casaSingleDishALMAWrapper.py @@ -1,23 +1,20 @@ - - -import os import glob -import tarfile +import logging +import os import shutil -import numpy as np - -from astropy.table import Table +import tarfile import analysisUtils as au - -import logging -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) +import astropy.units as u +import numpy as np +from astropy.table import Table from . import casaLegacySingleDishRoutines as csdr from . import casaStuff from .utilsSingleDish import getTPSampling +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) # path constants path_calibration = '../calibration/' @@ -249,7 +246,6 @@ def runALMAPipeline(path_galaxy, # Create baseline dict with freq ranges to mask: if baseline_linewindow is None: - import astropy.units as u # Construct the line window via spw:low~high strings based on the # product_dict. 
diff --git a/phangsPipeline/casaStuff.py b/phangsPipeline/casaStuff.py index 49b65634..ea067159 100644 --- a/phangsPipeline/casaStuff.py +++ b/phangsPipeline/casaStuff.py @@ -1,209 +1,85 @@ # CASA imports +import casashell +import casatasks +import casatools +import casaplotms +from almahelpers_localcopy import tsysspwmap +from casatasks import ( + casalog, + concat, + exportfits, + feather, + flagcmd, + flagdata, + gencal, + imhead, + immath, + impbcor, + importasdm, + importfits, + imrebin, + imregrid, + imsmooth, + imstat, + imsubimage, + imtrans, + imval, + listobs, + makemask, + mstransform, + plotbandpass, + rmtables, + sdbaseline, + sdcal, + split, + statwt, + tclean, + tsdimaging, + uvcontsub, + visstat, +) +from casatasks.private import sdint_helper +from casatools import ( + table, + image, + imager, + msmetadata, + synthesisimager, + synthesisutils, + regionmanager, + measures, + quanta, +) -# This is a huge pain. Check that it works correctly by running - -# casapy-XYZ -c casaStuff.py --nologger - -# with XYZ each relevant version. 
- -# AKL - checked with 6.5, 6.4, 6.3, 6.2.1, 5.8, 5.7, 5.6.1, 5.4, 5.3, 5.1.1, 5.0, 4.7.2, 4.5.3 -# TGW - checked with 6.7 try: - from packaging import version -except ImportError: - class version: - def parse(self, vstr): - return tuple(map(int, vstr.replace('-','.').split('.')[0:3])) - -# Obtain a version tuple -if ("casa" in locals()) or ("casa" in globals()): - casa_version = tuple( - map(int, casa["build"]["version"].replace("-", ".").split(".")[0:3]) - ) # tested CASA 4, 5 - casa_version_str = ".".join( - [str(casa_version_no) for casa_version_no in casa_version] - ) -else: - # This works in CASA 6 where the casatools has a version attribute - import casatools - - casa_version = ( - casatools.version()[0], - casatools.version()[1], - casatools.version()[2], - ) - casa_version_str = ".".join( - [str(casa_version_no) for casa_version_no in casa_version] - ) - -print("CASA version: ", casa_version_str) - -# Import specific CASA tasks - -# Imports for CASA versions above 6 -if casa_version[0] >= 6: - - import casatools - from casatools import ( - table, - image, - imager, - msmetadata, - synthesisimager, - synthesisutils, - regionmanager, - measures, - quanta, - ) - - import casatasks - from casatasks import ( - casalog, - concat, - exportfits, - feather, - flagcmd, - flagdata, - gencal, - imhead, - immath, - impbcor, - importasdm, - importfits, - imrebin, - imregrid, - imsmooth, - imstat, - imsubimage, - imtrans, - imval, - listobs, - makemask, - mstransform, - rmtables, - sdbaseline, - sdcal, - split, - statwt, - tclean, - tsdimaging, - uvcontsub, - visstat, - ) - - # sdintimaging imports - from casatasks.private import sdint_helper - - # singledish processing imports - # see some documents at - # - https://casadocs.readthedocs.io/en/stable/api/casatasks.html?highlight=sdcal#single-dish - # - https://casadocs.readthedocs.io/en/stable/notebooks/synthesis_calibration.html?highlight=recipes - iatool = image - rgtool = regionmanager - imtool = imager - msmdtool = 
msmetadata - tbtool = table - metool = measures - qatool = quanta - - import casaplotms - - # Depending on version, imports can be different - try: - plotms = casaplotms.gotasks.plotms.plotms - except AttributeError: - plotms = casaplotms.plotms - - try: - import casaviewer - except (ImportError, ModuleNotFoundError): - casaviewer = None - print("Could not import casaviewer") - - if casaviewer is not None: - try: - viewer = casaviewer.gotasks.imview.imview - except AttributeError: - viewer = casaviewer.imview - - import casashell - - try: - gencal = casashell.private.gencal.gencal - except AttributeError: - from casatasks import gencal - - try: - plotbandpass = casashell.private.plotbandpass.plotbandpass - except AttributeError: - from casatasks import plotbandpass - - try: - sdbaseline = casashell.private.sdbaseline.sdbaseline - except AttributeError: - from casatasks import sdbaseline - - try: - sdcal = casashell.private.sdcal.sdcal - except AttributeError: - from casatasks import sdcal - - # tsysspwmap import - from almahelpers_localcopy import tsysspwmap - -if casa_version[0] < 6: - from taskinit import * - - from concat import concat - from exportfits import exportfits - from feather import feather - from flagcmd import flagcmd - from flagdata import flagdata - from gencal import gencal - from imhead import imhead - from immath import immath - from impbcor import impbcor - from importasdm import importasdm - from importfits import importfits - from imrebin import imrebin - from imregrid import imregrid - from imsmooth import imsmooth - from imstat import imstat - from imsubimage import imsubimage - from imtrans import imtrans - from imval import imval - from listobs import listobs - from makemask import makemask - from mstransform import mstransform - from plotbandpass import plotbandpass - from plotms import plotms - from sdbaseline import sdbaseline - from sdcal import sdcal - from sdimaging import sdimaging - from split import split - from statwt import 
statwt - from taskinit import metool - from taskinit import msmdtool - from taskinit import qatool - from taskinit import tbtool - from tclean import tclean - from uvcontsub import uvcontsub - from viewer import viewer - from visstat import visstat - - from recipes.almahelpers import tsysspwmap - -# Imports for singledish processing when CASA version < 5 -if casa_version[0] < 5: - from sdsave import sdsave - from sdlist import sdlist - from sdcal2 import sdcal2 - from sdscale import sdscale - from sdplot import sdplot - -# sdintimaging import -if (casa_version[0] >= 6): - from .taskSDIntImaging import sdintimaging - # TODO: switch to casatask version of sdintimaging - # from casatasks import sdintimaging - + import casaviewer +except (ImportError, ModuleNotFoundError): + casaviewer = None + print("Could not import casaviewer") + +# TODO: Move back to CASA task +from .taskSDIntImaging import sdintimaging + +# Get CASA version +casa_version = ( + casatools.version()[0], + casatools.version()[1], + casatools.version()[2], +) +casa_version_str = ".".join( + [str(casa_version_no) for casa_version_no in casa_version] +) + +print(f"CASA version: {casa_version_str}") + +iatool = image +rgtool = regionmanager +imtool = imager +msmdtool = msmetadata +tbtool = table +metool = measures +qatool = quanta + +plotms = casaplotms.plotms +if casaviewer is not None: + viewer = casaviewer.imview diff --git a/phangsPipeline/casaVisRoutines.py b/phangsPipeline/casaVisRoutines.py index 0b53422a..fa787e02 100644 --- a/phangsPipeline/casaVisRoutines.py +++ b/phangsPipeline/casaVisRoutines.py @@ -2,31 +2,17 @@ Standalone routines to analyze and manipulate visibilities. """ -# 20200226: introduced os.mkdir(outfile+'.touch') os.rmdir(outfile+'.touch') -# 20200226: to make sure we can handle sudden system break. 
- -import os -import shutil -import inspect import glob import logging -import sys -from packaging import version - +import os +import shutil +import analysisUtils as au import numpy as np +from packaging import version from scipy.ndimage import label -# Analysis utilities -import analysisUtils as au - -# Pipeline versioning -from .pipelineVersion import version as pipeVer - -# CASA stuff from . import casaStuff - -# Spectral lines from . import utilsLines as lines logger = logging.getLogger(__name__) @@ -460,20 +446,18 @@ def contsub( ) # uvcontsub, this outputs infile+'.contsub' - # Pre 6.5.2 if version.parse(casaStuff.casa_version_str) < version.parse('6.5.2'): - uvcontsub_params = { 'vis': infile, 'fitspw': spw_flagging_string, - 'excludechans': False, # now uses complement for channel selection. + 'excludechans': False, # now uses complement for channel selection. 'combine': combine, 'fitorder': fitorder, 'solint': solint, 'want_cont': False} + # Post 6.5.2 else: - # Post 6.5.2 uvcontsub_params = { 'vis': infile, 'outputvis': outfile, @@ -1719,30 +1703,16 @@ def reweight_data( if exclude_str != '': logger.info("... running statwt with exclusion: "+exclude_str) - # Build the statwt call. 
Use the appropriate command for getting arguments - if sys.version_info >= (3, 11, 0): - getargspec = inspect.getfullargspec + # Build the statwt call + if exclude_str == '': + excludechans = False else: - getargspec = inspect.getargspec - - if 'fitspw' in getargspec(casaStuff.statwt)[0]: - # CASA version somewhat >= 5.5.0 - if exclude_str == '': - excludechans = False - else: - excludechans = True - statwt_params = { - 'vis': infile, 'timebin': '0.001s', 'slidetimebin': False, - 'chanbin': 'spw', 'statalg': 'classic', 'datacolumn': datacolumn, - 'fitspw': exclude_str, 'excludechans': excludechans, - } - else: - # CASA version <= 5.4.1 - statwt_params = { - 'vis': infile, 'timebin': '0.001s', 'slidetimebin': False, - 'chanbin': 'spw', 'statalg': 'classic', 'datacolumn': datacolumn, - 'excludechans': exclude_str, - } + excludechans = True + statwt_params = { + 'vis': infile, 'timebin': '0.001s', 'slidetimebin': False, + 'chanbin': 'spw', 'statalg': 'classic', 'datacolumn': datacolumn, + 'fitspw': exclude_str, 'excludechans': excludechans, + } # Run the call if not os.path.isdir(infile+'.touch'): diff --git a/phangsPipeline/casa_check.py b/phangsPipeline/check_imports.py similarity index 54% rename from phangsPipeline/casa_check.py rename to phangsPipeline/check_imports.py index c4c2788a..4cd89238 100644 --- a/phangsPipeline/casa_check.py +++ b/phangsPipeline/check_imports.py @@ -1,25 +1,25 @@ - -# Is CASA installed? 
- def is_casa_installed(): + """Check if CASA is installed.""" casa_enabled = False - - # CASA 5 try: - import taskinit + import casatasks + import casatools casa_enabled = True - return casa_enabled except (ImportError, ModuleNotFoundError): pass - # CASA 6 + return casa_enabled + +def is_spectral_cube_installed(): + """Check if spectral-cube is installed.""" + + spectral_cube_enabled = False try: - import casatasks - import casatools - casa_enabled = True - return casa_enabled + import spectral_cube + spectral_cube_enabled = True except (ImportError, ModuleNotFoundError): pass - return casa_enabled + return spectral_cube_enabled + diff --git a/phangsPipeline/clean_call.py b/phangsPipeline/clean_call.py index 0da9eba7..10b99fa3 100644 --- a/phangsPipeline/clean_call.py +++ b/phangsPipeline/clean_call.py @@ -2,11 +2,11 @@ This is a dummy CleanCall class for dry run only, or to be inheritted by casaImagingRoutines.CleanCall. """ -from multiprocessing.sharedctypes import Value -import numpy as np +import logging import re -import logging +import numpy as np + logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) diff --git a/phangsPipeline/conftest.py b/phangsPipeline/conftest.py deleted file mode 100644 index b30ef839..00000000 --- a/phangsPipeline/conftest.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Configure Test Suite. - -This file is used to configure the behavior of pytest when using the Astropy -test infrastructure. It needs to live inside the package in order for it to -get picked up when running the tests inside an interpreter using -`phangs_imaging_pipeline.test()`. 
- -""" - -import os - -from astropy.version import version as astropy_version - -# For Astropy 3.0 and later, we can use the standalone pytest plugin -if astropy_version < '3.0': - from astropy.tests.pytest_plugins import * # noqa - del pytest_report_header - ASTROPY_HEADER = True -else: - try: - from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS - ASTROPY_HEADER = True - except ImportError: - ASTROPY_HEADER = False - - -def pytest_configure(config): - """Configure Pytest with Astropy. - - Parameters - ---------- - config : pytest configuration - - """ - if ASTROPY_HEADER: - - config.option.astropy_header = True - - # Customize the following lines to add/remove entries from the list of - # packages for which version numbers are displayed when running the tests. - PYTEST_HEADER_MODULES.pop('Pandas', None) - PYTEST_HEADER_MODULES['scikit-image'] = 'skimage' - - from . import __version__ - packagename = os.path.basename(os.path.dirname(__file__)) - TESTED_VERSIONS[packagename] = __version__ - -# Uncomment the last two lines in this block to treat all DeprecationWarnings as -# exceptions. For Astropy v2.0 or later, there are 2 additional keywords, -# as follow (although default should work for most cases). 
-# To ignore some packages that produce deprecation warnings on import -# (in addition to 'compiler', 'scipy', 'pygments', 'ipykernel', and -# 'setuptools'), add: -# modules_to_ignore_on_import=['module_1', 'module_2'] -# To ignore some specific deprecation warning messages for Python version -# MAJOR.MINOR or later, add: -# warnings_to_ignore_by_pyver={(MAJOR, MINOR): ['Message to ignore']} -# from astropy.tests.helper import enable_deprecations_as_exceptions # noqa -# enable_deprecations_as_exceptions() diff --git a/phangsPipeline/data/README.rst b/phangsPipeline/data/README.rst deleted file mode 100644 index 382f6e76..00000000 --- a/phangsPipeline/data/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -Data directory -============== - -This directory contains data files included with the package source -code distribution. Note that this is intended only for relatively small files -- large files should be externally hosted and downloaded as needed. diff --git a/phangsPipeline/ftplane_convolution.py b/phangsPipeline/ftplane_convolution.py index 64793cbc..06f0b0d3 100644 --- a/phangsPipeline/ftplane_convolution.py +++ b/phangsPipeline/ftplane_convolution.py @@ -1,11 +1,13 @@ -from spectral_cube import SpectralCube, VaryingResolutionSpectralCube -from radio_beam import Beam -import numpy as np -import astropy.units as u -import astropy.utils.console as console import copy import warnings +import astropy.units as u +import astropy.utils.console as console +import numpy as np +from radio_beam import Beam +from spectral_cube import SpectralCube, VaryingResolutionSpectralCube + + def ftconvolve(ImageIn, major = 1.0, minor = 1.0, angle = 0.0): NanMaskFlag = False diff --git a/phangsPipeline/handlerAlmaDownload.py b/phangsPipeline/handlerAlmaDownload.py index bddbbe48..9133cdbd 100644 --- a/phangsPipeline/handlerAlmaDownload.py +++ b/phangsPipeline/handlerAlmaDownload.py @@ -25,6 +25,20 @@ import tarfile from functools import partial +import astropy +import astropy.units as u 
+import numpy as np +import requests +from astropy import table +from astropy.coordinates import SkyCoord +from astroquery.alma import Alma, Conf +from astroquery.alma.utils import parse_frequency_support +from bs4 import BeautifulSoup + +from . import handlerKeys +from . import handlerTemplate +from . import utilsLines + # Tags to identify antenna arrays ANTENNA_ARRAY_SETUP = {'12m': ['DV', 'DA'], '7m': ['CM'], @@ -207,1020 +221,976 @@ def get_casa_version_from_calibration_script(script): logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed - -casa_enabled = is_casa_installed() - -if casa_enabled: - logger.debug('casa_enabled = True') -else: - logger.debug('casa_enabled = False') - -# import phangs pipeline stuff -from . import utilsLines -from . import handlerTemplate -from . import handlerKeys - -try: - import astropy - import astroquery - from bs4 import BeautifulSoup - import requests - - has_imports = True -except ImportError: - logger.debug("Some required packages not installed.") - has_imports = False - -if has_imports: - import astropy.units as u - import numpy as np - from astropy import table - from astropy.coordinates import SkyCoord - from astroquery.alma import Alma, Conf - from astroquery.alma.utils import parse_frequency_support - - - class AlmaDownloadHandler(handlerTemplate.HandlerTemplate): - """ - Class to automate downloading and calibrating ALMA data. +class AlmaDownloadHandler(handlerTemplate.HandlerTemplate): + """ + Class to automate downloading and calibrating ALMA data. - N.B. Since clean masks are generated externally, this code will not modify that key file. It will, however, - modify ms_key, dir_key, linmos_key, singledish_key, and target_key. + N.B. Since clean masks are generated externally, this code will not modify that key file. It will, however, + modify ms_key, dir_key, linmos_key, singledish_key, and target_key. 
- This script may also produce weirdness if you have a multiple files for each key type. + This script may also produce weirdness if you have a multiple files for each key type. - """ + """ - ############ - # __init__ # - ############ - - def __init__( - self, - key_handler=None, - master_key=None, - restore_previous_target_keys=True, - dry_run=False, - ): - # inherit template class + ############ + # __init__ # + ############ + + def __init__( + self, + key_handler=None, + master_key=None, + restore_previous_target_keys=True, + dry_run=False, + ): + # inherit template class + handlerTemplate.HandlerTemplate.__init__(self, + key_handler=key_handler, + dry_run=dry_run) + + # If we've run this before, restore the original key file and reinitialise + key_root = self._kh._key_dir + target_key_file = self._kh._target_keys[0] + target_key_path = os.path.join(key_root, target_key_file) + pre_dl_target_key_path = target_key_path + '_pre_AlmaDownload' + + if os.path.exists(pre_dl_target_key_path) and restore_previous_target_keys: + logger.info('This is a rerun, restoring old target keys') + + os.system('rm -rf %s' % target_key_path) + os.system('mv %s %s' % (pre_dl_target_key_path, target_key_path)) + + key_handler = handlerKeys.KeyHandler(master_key=master_key) handlerTemplate.HandlerTemplate.__init__(self, key_handler=key_handler, dry_run=dry_run) - # If we've run this before, restore the original key file and reinitialise - key_root = self._kh._key_dir - target_key_file = self._kh._target_keys[0] - target_key_path = os.path.join(key_root, target_key_file) - pre_dl_target_key_path = target_key_path + '_pre_AlmaDownload' - - if os.path.exists(pre_dl_target_key_path) and restore_previous_target_keys: - logger.info('This is a rerun, restoring old target keys') - - os.system('rm -rf %s' % target_key_path) - os.system('mv %s %s' % (pre_dl_target_key_path, target_key_path)) - - key_handler = handlerKeys.KeyHandler(master_key=master_key) - 
handlerTemplate.HandlerTemplate.__init__(self, - key_handler=key_handler, - dry_run=dry_run) - - def loop_alma_download( - self, - do_all=False, - make_directories=True, - do_download=False, - do_calibrate=False, - do_build_key_files=False, - do_tp=False, - allow_proprietary=False, - username=None, - query_radius=10 * u.arcmin, - suppress_casa_output=True, - split_ms='mosaic', - overwrite_download=False, - overwrite_calibrate=False, - overwrite_build_key_files=True, - overwrite_all=False, - ): - """Download and calibrate ALMA data. - - Major steps: - - (1) Query archive for target/line/antenna config and download - (2) Figure out which version of CASA to run the scriptForPI in, and run that - (3) Build the file keys from the downloaded, calibrated files - - N.B. After running this you will need to reinitialise the key handler, since this makes edits to various - key files - - Args: - do_tp (bool, optional): If True, will also download TP data and sort out for the TP pipeline. Defaults - to False. - allow_proprietary (bool, optional): If True, will log in to the ALMA servers using username, to allow - for download of proprietary data. This requires a working keychain package! Defaults to False. - username (str, optional): ALMA username to login. On first time running this, it will ask for a - password. - query_radius (astropy.units, optional): Search radius for fallback coordinate query. Defaults to 10 - arcmin, same as the ALMA archive search. - split_ms (str, optional): Can be one of 'mosaic', 'join', 'separate'. Determines how the MS file key is - set up for later staging and imaging. If mosaic, will attempt to be smart and split observations by - project ID/science goal. If 'join', all observations will be joined together into a potentially - huge map. If 'separate', then each calibrated MS will be assigned to a separate target. Defaults to - 'mosaic', which mimics the way the PHANGS-ALMA targets are set up. 
- - """ - - if do_all: - make_directories = True - do_download = True - do_calibrate = True - do_build_key_files = True - do_tp = True - if overwrite_all: - overwrite_download = True - overwrite_calibrate = True - overwrite_build_key_files = True - - # Error checking - - if len(self.get_targets()) == 0: - logger.error("Need a target list.") - return None - - if len(self.get_all_products()) == 0: - logger.error("Need a products list.") - return None + def loop_alma_download( + self, + do_all=False, + make_directories=True, + do_download=False, + do_calibrate=False, + do_build_key_files=False, + do_tp=False, + allow_proprietary=False, + username=None, + query_radius=10 * u.arcmin, + suppress_casa_output=True, + split_ms='mosaic', + overwrite_download=False, + overwrite_calibrate=False, + overwrite_build_key_files=True, + overwrite_all=False, + ): + """Download and calibrate ALMA data. + + Major steps: + + (1) Query archive for target/line/antenna config and download + (2) Figure out which version of CASA to run the scriptForPI in, and run that + (3) Build the file keys from the downloaded, calibrated files + + N.B. After running this you will need to reinitialise the key handler, since this makes edits to various + key files + + Args: + do_tp (bool, optional): If True, will also download TP data and sort out for the TP pipeline. Defaults + to False. + allow_proprietary (bool, optional): If True, will log in to the ALMA servers using username, to allow + for download of proprietary data. This requires a working keychain package! Defaults to False. + username (str, optional): ALMA username to login. On first time running this, it will ask for a + password. + query_radius (astropy.units, optional): Search radius for fallback coordinate query. Defaults to 10 + arcmin, same as the ALMA archive search. + split_ms (str, optional): Can be one of 'mosaic', 'join', 'separate'. Determines how the MS file key is + set up for later staging and imaging. 
If mosaic, will attempt to be smart and split observations by + project ID/science goal. If 'join', all observations will be joined together into a potentially + huge map. If 'separate', then each calibrated MS will be assigned to a separate target. Defaults to + 'mosaic', which mimics the way the PHANGS-ALMA targets are set up. - # If requested, make the directories - if make_directories: - self._kh.make_missing_directories(ms_root=True) + """ - # If allowing proprietary data, login here and save password for later - if allow_proprietary: - if username is None: - logger.warning('username should be set!') - raise Exception('username should be set!') - Conf.username = username - alma = Alma() - alma.login(username, store_password=True) + if do_all: + make_directories = True + do_download = True + do_calibrate = True + do_build_key_files = True + do_tp = True + if overwrite_all: + overwrite_download = True + overwrite_calibrate = True + overwrite_build_key_files = True + + # Error checking + + if len(self.get_targets()) == 0: + logger.error("Need a target list.") + return None + + if len(self.get_all_products()) == 0: + logger.error("Need a products list.") + return None + + # If requested, make the directories + if make_directories: + self._kh.make_missing_directories(ms_root=True) + + # If allowing proprietary data, login here and save password for later + if allow_proprietary: + if username is None: + logger.warning('username should be set!') + raise Exception('username should be set!') + Conf.username = username + alma = Alma() + alma.login(username, store_password=True) + + # If requested, query/download/extract data + if do_download: - # If requested, query/download/extract data - if do_download: + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Beginning download/extraction of ALMA data") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - logger.info("") - 
logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Beginning download/extraction of ALMA data") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True, just_interf=True): + uids = self.task_query(target=this_target, + product=this_product, + config=this_config, + query_radius=query_radius, + overwrite=overwrite_download) + self.task_download(target=this_target, + product=this_product, + config=this_config, + uids=uids, + username=username, + overwrite=overwrite_download) + + # Also potentially include TP + if do_tp: - for this_target, this_product, this_config in \ - self.looper(do_targets=True, do_products=True, do_configs=True, just_interf=True): + for this_target, this_product in \ + self.looper(do_targets=True, do_products=True, do_configs=False): uids = self.task_query(target=this_target, product=this_product, - config=this_config, - query_radius=query_radius, - overwrite=overwrite_download) + config='tp') self.task_download(target=this_target, product=this_product, - config=this_config, + config='tp', uids=uids, username=username, overwrite=overwrite_download) - # Also potentially include TP - if do_tp: - - for this_target, this_product in \ - self.looper(do_targets=True, do_products=True, do_configs=False): - uids = self.task_query(target=this_target, - product=this_product, - config='tp') - self.task_download(target=this_target, - product=this_product, - config='tp', - uids=uids, - username=username, - overwrite=overwrite_download) - - # If requested, run scriptForPI - if do_calibrate: - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Beginning calibration of ALMA data") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - - for this_target, this_config in \ - self.looper(do_targets=True, do_products=False, do_configs=True, just_interf=True): - 
self.task_run_scriptforpi(target=this_target, - config=this_config, - suppress_casa_output=suppress_casa_output, - overwrite=overwrite_calibrate) - - # If requested, build key files - if do_build_key_files: - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Building key files") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - - self.task_build_key_files(do_tp=do_tp, - split_ms=split_ms, - overwrite=overwrite_build_key_files) - - def task_query(self, - target=None, - product=None, - config=None, - query_radius=10 * u.arcmin, - max_query_failures=10, - overwrite=False, - ): - """Query ALMA archive. - - Uses astroquery to search the archive for data which matches (a) the target, (b) the product, (c) the - antenna config (12m or 7m), and then velocity resolution. - - Args: - max_query_failures (int, optional): Number of times to try querying the database before failing out. - Defaults to 10 - Returns: - list of UIDs to download - - """ + # If requested, run scriptForPI + if do_calibrate: logger.info("") logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Querying ALMA database for:") - logger.info('{0}, {1}, {2}'.format(target, product, config)) + logger.info("Beginning calibration of ALMA data") logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") logger.info("") - if target is None: - logger.warning('Require a target') - return None - if product is None: - logger.warning('Require a product') - return None - if config is None or config not in ANTENNA_ARRAY_SETUP.keys(): - logger.warning('Require a valid config (%s)' % list(ANTENNA_ARRAY_SETUP.keys())) - return None - - ms_root = self._kh._ms_roots[0] - - # Pull out line, channel width, and central frequency for this combination of target, product, config - line = self._kh.get_line_tag_for_line_product(product) - channel_kms = self._kh._config_dict['line_product'][product]['channel_kms'] - vsys, vwidth = 
self._kh.get_system_velocity_and_velocity_width_for_target(target, check_parent=False) - line_low_ghz, line_high_ghz = utilsLines.get_ghz_range_for_line(line=line, - vsys_kms=vsys, - vwidth_kms=vwidth, - ) - line_ghz = ((line_high_ghz + line_low_ghz) / 2) * u.GHz + for this_target, this_config in \ + self.looper(do_targets=True, do_products=False, do_configs=True, just_interf=True): + self.task_run_scriptforpi(target=this_target, + config=this_config, + suppress_casa_output=suppress_casa_output, + overwrite=overwrite_calibrate) - # Perform a target query. - observations = query_target(target, max_query_failures=max_query_failures) - - # If the target doesn't resolve, fall back to RA/Dec search.key file building - if observations is None: - ra, dec = self._kh.get_phasecenter_for_target(target=target) - coords = SkyCoord('%s %s' % (ra, dec)) - observations = query_region(coords=coords, radius=query_radius, max_query_failures=max_query_failures) - - # Include custom switches - download_restrictions = self._kh.get_alma_download_restrictions(target=target, - product=product, - config=config) - - parsed_obs = table.Table() - - for observation in observations: - - # Check if the file actually already exists + # If requested, build key files + if do_build_key_files: + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Building key files") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - proposal_id = observation['proposal_id'] - file_uid = observation['asdm_uid'].replace(':', '_').replace('/', '_') - tar_file_name = '%s_%s.asdm.sdm.tar' % (proposal_id, file_uid) + self.task_build_key_files(do_tp=do_tp, + split_ms=split_ms, + overwrite=overwrite_build_key_files) + + def task_query(self, + target=None, + product=None, + config=None, + query_radius=10 * u.arcmin, + max_query_failures=10, + overwrite=False, + ): + """Query ALMA archive. 
+ + Uses astroquery to search the archive for data which matches (a) the target, (b) the product, (c) the + antenna config (12m or 7m), and then velocity resolution. + + Args: + max_query_failures (int, optional): Number of times to try querying the database before failing out. + Defaults to 10 + Returns: + list of UIDs to download - if config != 'tp': - file_name = os.path.join(ms_root, target, config, tar_file_name) - else: - file_name = os.path.join(ms_root, target, config, product, tar_file_name) + """ - if os.path.exists(file_name) and not overwrite: - continue + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Querying ALMA database for:") + logger.info('{0}, {1}, {2}'.format(target, product, config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + if target is None: + logger.warning('Require a target') + return None + if product is None: + logger.warning('Require a product') + return None + if config is None or config not in ANTENNA_ARRAY_SETUP.keys(): + logger.warning('Require a valid config (%s)' % list(ANTENNA_ARRAY_SETUP.keys())) + return None + + ms_root = self._kh._ms_roots[0] + + # Pull out line, channel width, and central frequency for this combination of target, product, config + line = self._kh.get_line_tag_for_line_product(product) + channel_kms = self._kh._config_dict['line_product'][product]['channel_kms'] + vsys, vwidth = self._kh.get_system_velocity_and_velocity_width_for_target(target, check_parent=False) + line_low_ghz, line_high_ghz = utilsLines.get_ghz_range_for_line(line=line, + vsys_kms=vsys, + vwidth_kms=vwidth, + ) + line_ghz = ((line_high_ghz + line_low_ghz) / 2) * u.GHz + + # Perform a target query. 
+ observations = query_target(target, max_query_failures=max_query_failures) + + # If the target doesn't resolve, fall back to RA/Dec search.key file building + if observations is None: + ra, dec = self._kh.get_phasecenter_for_target(target=target) + coords = SkyCoord('%s %s' % (ra, dec)) + observations = query_region(coords=coords, radius=query_radius, max_query_failures=max_query_failures) + + # Include custom switches + download_restrictions = self._kh.get_alma_download_restrictions(target=target, + product=product, + config=config) + + parsed_obs = table.Table() + + for observation in observations: + + # Check if the file actually already exists + + proposal_id = observation['proposal_id'] + file_uid = observation['asdm_uid'].replace(':', '_').replace('/', '_') + tar_file_name = '%s_%s.asdm.sdm.tar' % (proposal_id, file_uid) + + if config != 'tp': + file_name = os.path.join(ms_root, target, config, tar_file_name) + else: + file_name = os.path.join(ms_root, target, config, product, tar_file_name) - # Do checks on velocity resolution - velocity_res = observation['velocity_resolution'] / 1000 - if velocity_res > channel_kms: - continue + if os.path.exists(file_name) and not overwrite: + continue - # Checks on arrays we want - antenna_arrays = observation['antenna_arrays'] + # Do checks on velocity resolution + velocity_res = observation['velocity_resolution'] / 1000 + if velocity_res > channel_kms: + continue - array_wanted = False + # Checks on arrays we want + antenna_arrays = observation['antenna_arrays'] - array_setup_tags = ANTENNA_ARRAY_SETUP[config] - for array_setup_tag in array_setup_tags: - if array_setup_tag in antenna_arrays: - array_wanted = True + array_wanted = False - # Sometimes it seems like TP can use other antenna (maybe this is only early science?) so double - # check here. 
TP only has 4 antenna so - if config != 'tp': - # for tp_array_tag in ANTENNA_ARRAY_SETUP['tp']: - # if tp_array_tag in antenna_arrays: - if antenna_arrays.count(':') <= 4: - array_wanted = False + array_setup_tags = ANTENNA_ARRAY_SETUP[config] + for array_setup_tag in array_setup_tags: + if array_setup_tag in antenna_arrays: + array_wanted = True - if not array_wanted: - continue + # Sometimes it seems like TP can use other antenna (maybe this is only early science?) so double + # check here. TP only has 4 antenna so + if config != 'tp': + # for tp_array_tag in ANTENNA_ARRAY_SETUP['tp']: + # if tp_array_tag in antenna_arrays: + if antenna_arrays.count(':') <= 4: + array_wanted = False - # Check the line we want is in the frequency range + if not array_wanted: + continue - freqs = parse_frequency_support(observation['frequency_support']) + # Check the line we want is in the frequency range - freq_wanted = False + freqs = parse_frequency_support(observation['frequency_support']) - for freq in freqs: - if freq[0] <= line_ghz <= freq[1]: - freq_wanted = True + freq_wanted = False - if not freq_wanted: - continue + for freq in freqs: + if freq[0] <= line_ghz <= freq[1]: + freq_wanted = True - # Check for any additional restrictions - restriction_found = False - if download_restrictions is not None: - for key in download_restrictions.keys(): - download_restriction = download_restrictions[key] - if not isinstance(download_restriction, list): - download_restriction = [download_restriction] + if not freq_wanted: + continue - if observation[key] not in download_restriction: - restriction_found = True - break + # Check for any additional restrictions + restriction_found = False + if download_restrictions is not None: + for key in download_restrictions.keys(): + download_restriction = download_restrictions[key] + if not isinstance(download_restriction, list): + download_restriction = [download_restriction] - if restriction_found: - continue + if observation[key] not in 
download_restriction: + restriction_found = True + break - # After we've parsed everything down, append that observation row to a new table - parsed_obs = table.vstack([parsed_obs, observation]) + if restriction_found: + continue - if len(parsed_obs) == 0: - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('No suitable UIDs found') - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - return None + # After we've parsed everything down, append that observation row to a new table + parsed_obs = table.vstack([parsed_obs, observation]) - uids = np.unique(parsed_obs['member_ous_uid']) + if len(parsed_obs) == 0: logger.info("") logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('Found %d suitable UIDs:' % len(uids)) - for uid in uids: - logger.info('-> %s' % uid) + logger.info('No suitable UIDs found') logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") logger.info("") + return None + + uids = np.unique(parsed_obs['member_ous_uid']) + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info('Found %d suitable UIDs:' % len(uids)) + for uid in uids: + logger.info('-> %s' % uid) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + return uids + + def task_download(self, + target=None, + config=None, + product=None, + uids=None, + username=None, + n_simultaneous=5, + overwrite=False, + ): + """Downloads queried UIDs. + + Hooks the queried UIDs to download raw/ancillary/readme data. Will go into a folder structure as + ms_root/target/config/(product for TP), to make navigation a little easier + + Args: + username (str, optional): ALMA archive username. Defaults to None. + n_simultaneous (int, optional): Number of download processes to spawn, to speed up download time. + Defaults to 5. 
- return uids - - def task_download(self, - target=None, - config=None, - product=None, - uids=None, - username=None, - n_simultaneous=5, - overwrite=False, - ): - """Downloads queried UIDs. - - Hooks the queried UIDs to download raw/ancillary/readme data. Will go into a folder structure as - ms_root/target/config/(product for TP), to make navigation a little easier + """ - Args: - username (str, optional): ALMA archive username. Defaults to None. - n_simultaneous (int, optional): Number of download processes to spawn, to speed up download time. - Defaults to 5. + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Downloading/extracting data for:") + logger.info('{0}, {1}, {2}'.format(target, product, config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + if target is None: + logger.warning('Require a target') + return None + if config is None: + logger.warning('Require a config') + return None + if product is None and config == 'tp': + logger.warning('Require a product for TP') + return None + if uids is None: + logger.warning('Require UIDs to download') + return None + + # Setup download location, this will be ms_root/{target}/{config} + ms_root = self._kh._ms_roots[0] + + if config == 'tp': + dl_dir = os.path.join(ms_root, target, config, product) + else: + dl_dir = os.path.join(ms_root, target, config) - """ + # Delete download folder if we're overwriting + if overwrite: + os.system('rm -rf %s' % dl_dir) + + if not os.path.exists(dl_dir): + os.makedirs(dl_dir) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info('Downloading ALMA data for %d UIDs:' % len(uids)) + for uid in uids: + logger.info('-> %s' % uid) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + link_list = Alma.get_data_info(uids, expand_tarfiles=False) + + # Trim out any pointless files + rows_to_remove = [] + for i, link in enumerate(link_list): + if 'asdm.sdm' not in 
link['access_url'] and \ + 'README' not in link['access_url'] and \ + 'auxiliary' not in link['access_url']: + rows_to_remove.append(i) + link_list.remove_rows(rows_to_remove) + + # Download files. Allow multiple downloads via pool + with mp.Pool(n_simultaneous) as p: + map_result = p.map_async(partial(astroquery_download, cache_location=dl_dir, username=username), + link_list) + map_result.wait() + + # Extract tar files + original_dir = os.getcwd() + os.chdir(dl_dir) + + original_tar_files = sorted(glob.glob('*.tar')) + + # Check if we've already untarred + tar_files = [] + for tar_file in original_tar_files: + if not os.path.exists('%s_touch' % tar_file): + tar_files.append(tar_file) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info('Untarring %d files:' % len(tar_files)) + for tar_file in tar_files: + logger.info('-> %s' % tar_file) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + # Loop over and once they've been untarred make a file so we know to skip next time + + for tar_file in tar_files: + os.system('tar --skip-old-files -xf %s' % tar_file) + os.system('touch %s_touch' % tar_file) + + os.chdir(original_dir) + + def task_run_scriptforpi(self, + target=None, + config=None, + suppress_casa_output=True, + overwrite=False): + """Runs scriptForPI on downloaded datasets. + + Figures out the pipeline version of CASA to run (currently, just from the QA report), and then will execute + that script to produce a calibrated measurement set + + Args: + suppress_casa_output (bool, optional): If True, will suppress most CASA output while running + scriptForPI. Mostly useful for debugging, and will still put the whole log into the /script + directory. Defaults to True. 
- logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Downloading/extracting data for:") - logger.info('{0}, {1}, {2}'.format(target, product, config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + """ - if target is None: - logger.warning('Require a target') - return None - if config is None: - logger.warning('Require a config') - return None - if product is None and config == 'tp': - logger.warning('Require a product for TP') - return None - if uids is None: - logger.warning('Require UIDs to download') - return None + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Calibrating data for:") + logger.info('{0}, {1}'.format(target, config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - # Setup download location, this will be ms_root/{target}/{config} - ms_root = self._kh._ms_roots[0] + if target is None: + logger.warning('Require a target') + return None + if config is None or config not in ANTENNA_ARRAY_SETUP.keys(): + logger.warning('Require a valid config (%s)' % list(ANTENNA_ARRAY_SETUP.keys())) + return None - if config == 'tp': - dl_dir = os.path.join(ms_root, target, config, product) - else: - dl_dir = os.path.join(ms_root, target, config) + ms_root = self._kh._ms_roots[0] + dl_dir = os.path.join(ms_root, target, config) - # Delete download folder if we're overwriting - if overwrite: - os.system('rm -rf %s' % dl_dir) + if not os.path.exists(dl_dir): + logger.warning('Directory for %s, %s does not exist. 
Returning' % (target, config)) + return None - if not os.path.exists(dl_dir): - os.makedirs(dl_dir) + original_dir = os.getcwd() + os.chdir(dl_dir) - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('Downloading ALMA data for %d UIDs:' % len(uids)) - for uid in uids: - logger.info('-> %s' % uid) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + casa_version_files = {} - link_list = Alma.get_data_info(uids, expand_tarfiles=False) + # Search for calibration scripts, QA reports, and weblogs. Prefer the calibration script highest, since that + # will crash out if the wrong version is chosen + file_types = {'calibration_script': '*.scriptForCalibration.py', + 'qa_report': '*.qa2_report.html', + 'weblog': '*.weblog.*', + } - # Trim out any pointless files - rows_to_remove = [] - for i, link in enumerate(link_list): - if 'asdm.sdm' not in link['access_url'] and \ - 'README' not in link['access_url'] and \ - 'auxiliary' not in link['access_url']: - rows_to_remove.append(i) - link_list.remove_rows(rows_to_remove) + for file_type in file_types.keys(): + file_ext = file_types[file_type] - # Download files. 
Allow multiple downloads via pool - with mp.Pool(n_simultaneous) as p: - map_result = p.map_async(partial(astroquery_download, cache_location=dl_dir, username=username), - link_list) - map_result.wait() + for root, dirnames, filenames in os.walk(os.getcwd()): + for filename in fnmatch.filter(filenames, file_ext): + par_dir = os.path.dirname(root) + if par_dir not in casa_version_files.keys(): + casa_version_files[par_dir] = {'filename': os.path.join(root, filename), + 'type': file_type} - # Extract tar files - original_dir = os.getcwd() - os.chdir(dl_dir) + for root_dir in sorted(casa_version_files.keys()): - original_tar_files = sorted(glob.glob('*.tar')) + casa_version_file_type = casa_version_files[root_dir]['type'] + casa_version_file = casa_version_files[root_dir]['filename'] - # Check if we've already untarred - tar_files = [] - for tar_file in original_tar_files: - if not os.path.exists('%s_touch' % tar_file): - tar_files.append(tar_file) + member_dir = os.path.sep + os.path.join(*casa_version_file.split(os.path.sep)[:-2]) + calibrated_dir = os.path.join(member_dir, 'calibrated') + script_dir = os.path.join(member_dir, 'script') - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('Untarring %d files:' % len(tar_files)) - for tar_file in tar_files: - logger.info('-> %s' % tar_file) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + if os.path.exists(calibrated_dir) and not overwrite: + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info('Data already calibrated, will not rerun for %s' % member_dir.replace(ms_root, '')) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + continue - # Loop over and once they've been untarred make a file so we know to skip next time + # Clear out the calibrated directory + os.system('rm -rf %s' % calibrated_dir) - for tar_file in tar_files: - os.system('tar --skip-old-files -xf %s' % tar_file) - os.system('touch %s_touch' % 
tar_file) + if casa_version_file_type == 'qa_report': + casa_pipeline_version = get_casa_version_from_qa_report(casa_version_file) + elif casa_version_file_type == 'weblog': + casa_pipeline_version = get_casa_version_from_weblog(casa_version_file) + elif casa_version_file_type == 'calibration_script': + casa_pipeline_version = get_casa_version_from_calibration_script(casa_version_file) + else: + logger.warning('Version file type %s not understood' % casa_version_file_type) + raise Exception('Version file type %s not understood' % casa_version_file_type) - os.chdir(original_dir) + if casa_pipeline_version is None: + logger.warning('Could not find a CASA version to run scriptForPI') + raise Exception('Could not find a CASA version to run scriptForPI') - def task_run_scriptforpi(self, - target=None, - config=None, - suppress_casa_output=True, - overwrite=False): - """Runs scriptForPI on downloaded datasets. + # Get the CASA path + casa_path = self._kh.get_path_for_casaversion(casa_pipeline_version) - Figures out the pipeline version of CASA to run (currently, just from the QA report), and then will execute - that script to produce a calibrated measurement set + if casa_path is None: + logger.warning('No CASA path defined for %s' % casa_pipeline_version) + raise Exception('No CASA path defined for %s' % casa_pipeline_version) - Args: - suppress_casa_output (bool, optional): If True, will suppress most CASA output while running - scriptForPI. Mostly useful for debugging, and will still put the whole log into the /script - directory. Defaults to True. 
+ # Make sure we're properly pointing at CASA + if not casa_path.endswith('casa'): + casa_path = os.path.join(casa_path, 'casa') - """ + os.chdir(script_dir) + if os.path.exists('scriptForPI.py'): + script_names = ['scriptForPI.py'] + else: + script_names = glob.glob('*scriptForPI.py') logger.info("") logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Calibrating data for:") - logger.info('{0}, {1}'.format(target, config)) + logger.info('Calibrating %s' % member_dir.replace(ms_root, '')) logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") logger.info("") - if target is None: - logger.warning('Require a target') - return None - if config is None or config not in ANTENNA_ARRAY_SETUP.keys(): - logger.warning('Require a valid config (%s)' % list(ANTENNA_ARRAY_SETUP.keys())) - return None - - ms_root = self._kh._ms_roots[0] - dl_dir = os.path.join(ms_root, target, config) - - if not os.path.exists(dl_dir): - logger.warning('Directory for %s, %s does not exist. Returning' % (target, config)) - return None - - original_dir = os.getcwd() - os.chdir(dl_dir) - - casa_version_files = {} - - # Search for calibration scripts, QA reports, and weblogs. 
Prefer the calibration script highest, since that - # will crash out if the wrong version is chosen - file_types = {'calibration_script': '*.scriptForCalibration.py', - 'qa_report': '*.qa2_report.html', - 'weblog': '*.weblog.*', - } - - for file_type in file_types.keys(): - file_ext = file_types[file_type] - - for root, dirnames, filenames in os.walk(os.getcwd()): - for filename in fnmatch.filter(filenames, file_ext): - par_dir = os.path.dirname(root) - if par_dir not in casa_version_files.keys(): - casa_version_files[par_dir] = {'filename': os.path.join(root, filename), - 'type': file_type} - - for root_dir in sorted(casa_version_files.keys()): - - casa_version_file_type = casa_version_files[root_dir]['type'] - casa_version_file = casa_version_files[root_dir]['filename'] - - member_dir = os.path.sep + os.path.join(*casa_version_file.split(os.path.sep)[:-2]) - calibrated_dir = os.path.join(member_dir, 'calibrated') - script_dir = os.path.join(member_dir, 'script') - - if os.path.exists(calibrated_dir) and not overwrite: - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('Data already calibrated, will not rerun for %s' % member_dir.replace(ms_root, '')) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - continue - - # Clear out the calibrated directory - os.system('rm -rf %s' % calibrated_dir) - - if casa_version_file_type == 'qa_report': - casa_pipeline_version = get_casa_version_from_qa_report(casa_version_file) - elif casa_version_file_type == 'weblog': - casa_pipeline_version = get_casa_version_from_weblog(casa_version_file) - elif casa_version_file_type == 'calibration_script': - casa_pipeline_version = get_casa_version_from_calibration_script(casa_version_file) - else: - logger.warning('Version file type %s not understood' % casa_version_file_type) - raise Exception('Version file type %s not understood' % casa_version_file_type) - - if casa_pipeline_version is None: - logger.warning('Could not find a CASA 
version to run scriptForPI') - raise Exception('Could not find a CASA version to run scriptForPI') - - # Get the CASA path - casa_path = self._kh.get_path_for_casaversion(casa_pipeline_version) - - if casa_path is None: - logger.warning('No CASA path defined for %s' % casa_pipeline_version) - raise Exception('No CASA path defined for %s' % casa_pipeline_version) - - # Make sure we're properly pointing at CASA - if not casa_path.endswith('casa'): - casa_path = os.path.join(casa_path, 'casa') - - os.chdir(script_dir) - if os.path.exists('scriptForPI.py'): - script_names = ['scriptForPI.py'] - else: - script_names = glob.glob('*scriptForPI.py') - + for script_name in script_names: logger.info("") logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('Calibrating %s' % member_dir.replace(ms_root, '')) + logger.info('Running %s in CASA %s' % (script_name, casa_pipeline_version)) logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") logger.info("") - for script_name in script_names: - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('Running %s in CASA %s' % (script_name, casa_pipeline_version)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + # If the CASA version is 4.6, there is no pipeline version so don't use that switch + if '4.6.' in casa_pipeline_version: + pipeline_cmd = '' + else: + pipeline_cmd = '--pipeline' - # If the CASA version is 4.6, there is no pipeline version so don't use that switch - if '4.6.' 
in casa_pipeline_version: - pipeline_cmd = '' - else: - pipeline_cmd = '--pipeline' + cmd = '%s %s --nologger -c %s' % (casa_path, pipeline_cmd, script_name) + if suppress_casa_output: + cmd += ' >/dev/null 2>&1' + exit_code = os.system(cmd) - cmd = '%s %s --nologger -c %s' % (casa_path, pipeline_cmd, script_name) - if suppress_casa_output: - cmd += ' >/dev/null 2>&1' - exit_code = os.system(cmd) + # Sometimes this actually fails gracefully, so check we've got some files + output_mses = glob.glob('../calibrated/*.ms.split.cal') + glob.glob('../calibrated/*.ms') - # Sometimes this actually fails gracefully, so check we've got some files - output_mses = glob.glob('../calibrated/*.ms.split.cal') + glob.glob('../calibrated/*.ms') + # If we don't execute properly, this might be a case that it's an early cycle that's been re-imaged + # and the numbers are wrong in the report. Loop round some early CASA versions and try again + if exit_code != 0 or len(output_mses) == 0: - # If we don't execute properly, this might be a case that it's an early cycle that's been re-imaged - # and the numbers are wrong in the report. Loop round some early CASA versions and try again - if exit_code != 0 or len(output_mses) == 0: + # TODO: Loop over some early CASA versions. Looking at the list of pipeline versions + # (https://almascience.nrao.edu/processing/science-pipeline), these should cover them all, but + # leaving a note here in case we run into some weird cases + fallback_casa_versions = ['4.2.2', '4.3.1', '4.5.3', '4.7.2'] - # TODO: Loop over some early CASA versions. 
Looking at the list of pipeline versions - # (https://almascience.nrao.edu/processing/science-pipeline), these should cover them all, but - # leaving a note here in case we run into some weird cases - fallback_casa_versions = ['4.2.2', '4.3.1', '4.5.3', '4.7.2'] + for fallback_casa_version in fallback_casa_versions: - for fallback_casa_version in fallback_casa_versions: + os.system('rm -rf ../calibrated') - os.system('rm -rf ../calibrated') + fallback_casa_path = self._kh.get_path_for_casaversion(fallback_casa_version) - fallback_casa_path = self._kh.get_path_for_casaversion(fallback_casa_version) + if fallback_casa_path is None: + logger.warning('No CASA path defined for %s. Skipping' % fallback_casa_version) + continue - if fallback_casa_path is None: - logger.warning('No CASA path defined for %s. Skipping' % fallback_casa_version) - continue + # Make sure we're properly pointing at CASA + if not fallback_casa_path.endswith('casa'): + fallback_casa_path = os.path.join(fallback_casa_path, 'casa') - # Make sure we're properly pointing at CASA - if not fallback_casa_path.endswith('casa'): - fallback_casa_path = os.path.join(fallback_casa_path, 'casa') + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info('CASA %s failed, falling back to %s' % + (casa_pipeline_version, fallback_casa_version)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info('CASA %s failed, falling back to %s' % - (casa_pipeline_version, fallback_casa_version)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + # If the CASA version is 4.6, there is no pipeline version so don't use that switch + if '4.6.' in fallback_casa_version: + pipeline_cmd = '' + else: + pipeline_cmd = '--pipeline' - # If the CASA version is 4.6, there is no pipeline version so don't use that switch - if '4.6.' 
in fallback_casa_version: - pipeline_cmd = '' - else: - pipeline_cmd = '--pipeline' - - cmd = '%s %s --nologger -c %s' % (fallback_casa_path, pipeline_cmd, script_name) - if suppress_casa_output: - cmd += ' >/dev/null 2>&1' - fallback_exit_code = os.system(cmd) - - # Sometimes this actually fails gracefully, so check we've got some files - output_mses = glob.glob('../calibrated/*.ms.split.cal') + glob.glob('../calibrated/*.ms') - - if fallback_exit_code != 0 or len(output_mses) == 0: - os.system('rm -rf ../calibrated') - logger.warning("") - logger.warning("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.warning('Calibration still failing for %s' % fallback_casa_version) - logger.warning("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.warning("") - else: - break + cmd = '%s %s --nologger -c %s' % (fallback_casa_path, pipeline_cmd, script_name) + if suppress_casa_output: + cmd += ' >/dev/null 2>&1' + fallback_exit_code = os.system(cmd) - os.chdir(dl_dir) + # Sometimes this actually fails gracefully, so check we've got some files + output_mses = glob.glob('../calibrated/*.ms.split.cal') + glob.glob('../calibrated/*.ms') + + if fallback_exit_code != 0 or len(output_mses) == 0: + os.system('rm -rf ../calibrated') + logger.warning("") + logger.warning("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.warning('Calibration still failing for %s' % fallback_casa_version) + logger.warning("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.warning("") + else: + break os.chdir(dl_dir) - # Check everything now has a calibrated dir, which indicates the script has run - calibrated_dirs = [] - for path, dirs, files in os.walk(os.getcwd()): - if 'calibration' in dirs: - calibrated_dirs.append(os.path.join(path, 'calibrated')) + os.chdir(dl_dir) - missing_dirs = False - for calibrated_dir in calibrated_dirs: - if not os.path.exists(calibrated_dir): - # TODO: This will find any missing 'calibrated' directories, so keeping as a TODO for corner cases - logger.warning('Unexpected missing 
calibrated directory! %s' % calibrated_dir) - missing_dirs = True + # Check everything now has a calibrated dir, which indicates the script has run + calibrated_dirs = [] + for path, dirs, files in os.walk(os.getcwd()): + if 'calibration' in dirs: + calibrated_dirs.append(os.path.join(path, 'calibrated')) - if missing_dirs: - raise Exception('Missing some calibrated directories!') + missing_dirs = False + for calibrated_dir in calibrated_dirs: + if not os.path.exists(calibrated_dir): + # TODO: This will find any missing 'calibrated' directories, so keeping as a TODO for corner cases + logger.warning('Unexpected missing calibrated directory! %s' % calibrated_dir) + missing_dirs = True - # Move back to the original directory - os.chdir(original_dir) + if missing_dirs: + raise Exception('Missing some calibrated directories!') - def task_build_key_files(self, - do_tp=False, - split_ms='mosaic', - overwrite=False, - max_query_failures=10): - """Builds MS file key from calibrated measurement sets. + # Move back to the original directory + os.chdir(original_dir) - Recursively search through calibrated measurement sets and build up into a key file to be read in for later - imaging. + def task_build_key_files(self, + do_tp=False, + split_ms='mosaic', + overwrite=False, + max_query_failures=10): + """Builds MS file key from calibrated measurement sets. - """ + Recursively search through calibrated measurement sets and build up into a key file to be read in for later + imaging. - if split_ms not in ALLOWED_MS_GROUPINGS: - logger.warning('split_ms=%s not allowed. Should be one of %s' % (split_ms, ALLOWED_MS_GROUPINGS)) - return None + """ - key_root = self._kh._key_dir - ms_root = self._kh._ms_roots[0] + if split_ms not in ALLOWED_MS_GROUPINGS: + logger.warning('split_ms=%s not allowed. 
Should be one of %s' % (split_ms, ALLOWED_MS_GROUPINGS)) + return None - # Pull out file names - dir_key_file = self._kh._dir_keys[0] - linmos_key_file = self._kh._linmos_keys[0] - ms_key_file = self._kh._ms_keys[0] - target_key_file = self._kh._target_keys[0] + key_root = self._kh._key_dir + ms_root = self._kh._ms_roots[0] - dir_key_file_name = os.path.join(key_root, dir_key_file) - linmos_key_file_name = os.path.join(key_root, linmos_key_file) - ms_file_name = os.path.join(key_root, ms_key_file) - target_file_name = os.path.join(key_root, target_key_file) + # Pull out file names + dir_key_file = self._kh._dir_keys[0] + linmos_key_file = self._kh._linmos_keys[0] + ms_key_file = self._kh._ms_keys[0] + target_key_file = self._kh._target_keys[0] - if do_tp: - sd_key_file = self._kh._sd_keys[0] - sd_key_file_name = os.path.join(key_root, sd_key_file) + dir_key_file_name = os.path.join(key_root, dir_key_file) + linmos_key_file_name = os.path.join(key_root, linmos_key_file) + ms_file_name = os.path.join(key_root, ms_key_file) + target_file_name = os.path.join(key_root, target_key_file) - file_names_stacked = [dir_key_file_name, - linmos_key_file_name, - ms_file_name] + if do_tp: + sd_key_file = self._kh._sd_keys[0] + sd_key_file_name = os.path.join(key_root, sd_key_file) - if any([os.path.exists(file_name) for file_name in file_names_stacked]) and not overwrite: - logger.info('Files already exists and overwrite is False. Will not overwrite') - return None + file_names_stacked = [dir_key_file_name, + linmos_key_file_name, + ms_file_name] - os.system('rm -rf %s' % ms_file_name) + if any([os.path.exists(file_name) for file_name in file_names_stacked]) and not overwrite: + logger.info('Files already exists and overwrite is False. 
Will not overwrite') + return None - ms_file = open(ms_file_name, 'w+') + os.system('rm -rf %s' % ms_file_name) - mosaic_info = {} - singledish_info = {} + ms_file = open(ms_file_name, 'w+') - targets = self._kh.get_targets() + mosaic_info = {} + singledish_info = {} - for target in targets: + targets = self._kh.get_targets() - observations = query_target(target, max_query_failures=max_query_failures) + for target in targets: - # If the target doesn't resolve, fall back to RA/Dec search.key file building - if observations is None: - ra, dec = self._kh.get_phasecenter_for_target(target=target) - coords = SkyCoord('%s %s' % (ra, dec)) - observations = query_region(coords=coords, radius=query_radius, - max_query_failures=max_query_failures) + observations = query_target(target, max_query_failures=max_query_failures) - dl_dir = os.path.join(ms_root, target) + # If the target doesn't resolve, fall back to RA/Dec search.key file building + if observations is None: + ra, dec = self._kh.get_phasecenter_for_target(target=target) + coords = SkyCoord('%s %s' % (ra, dec)) + observations = query_region(coords=coords, radius=query_radius, + max_query_failures=max_query_failures) - original_dir = os.getcwd() - os.chdir(dl_dir) + dl_dir = os.path.join(ms_root, target) - target_ms_dict = {} - all_configs = [] - all_project_ids = [] - all_science_goals = [] + original_dir = os.getcwd() + os.chdir(dl_dir) - # Search recursively for files - for root, dirnames, filenames in os.walk(os.getcwd()): + target_ms_dict = {} + all_configs = [] + all_project_ids = [] + all_science_goals = [] - if 'tp' in root.split(os.path.sep) and do_tp: - # Find the member.* directory for TP data - search_term = 'member.*' - search_type = 'tp' - else: - # TODO: This will miss non '.ms.split.cal', so keeping this as a TODO for corner cases. 
- search_term = '*.ms.split.cal' - search_type = 'int' - - for dirname in fnmatch.filter(dirnames, search_term): - - full_dir = os.path.join(root, dirname).split(ms_root)[1] - full_dir_split = full_dir.split(os.path.sep) - - # Pull out config, product, project ID, science goal, member UID - if search_type == 'int': - config = full_dir_split[1] - product = None - project_id = full_dir_split[2] - science_goal = full_dir_split[3] - member_uid = full_dir_split[5] - elif search_type == 'tp': - config = full_dir_split[1] - product = full_dir_split[2] - project_id = full_dir_split[3] - science_goal = full_dir_split[4] - member_uid = full_dir_split[6] - else: - raise Exception('search_type %s not known!' % search_type) + # Search recursively for files + for root, dirnames, filenames in os.walk(os.getcwd()): - # Use member uid to get at coordinates of the observation + if 'tp' in root.split(os.path.sep) and do_tp: + # Find the member.* directory for TP data + search_term = 'member.*' + search_type = 'tp' + else: + # TODO: This will miss non '.ms.split.cal', so keeping this as a TODO for corner cases. + search_term = '*.ms.split.cal' + search_type = 'int' + + for dirname in fnmatch.filter(dirnames, search_term): + + full_dir = os.path.join(root, dirname).split(ms_root)[1] + full_dir_split = full_dir.split(os.path.sep) + + # Pull out config, product, project ID, science goal, member UID + if search_type == 'int': + config = full_dir_split[1] + product = None + project_id = full_dir_split[2] + science_goal = full_dir_split[3] + member_uid = full_dir_split[5] + elif search_type == 'tp': + config = full_dir_split[1] + product = full_dir_split[2] + project_id = full_dir_split[3] + science_goal = full_dir_split[4] + member_uid = full_dir_split[6] + else: + raise Exception('search_type %s not known!' 
% search_type) - member_uid = member_uid.replace('___', '://').replace('_', '/').replace('member.', '') - observations_ms = observations[member_uid == observations['member_ous_uid']][0] - ra, dec = observations_ms['s_ra'], observations_ms['s_dec'] + # Use member uid to get at coordinates of the observation - coord = SkyCoord(ra=ra * u.deg, dec=dec * u.deg) - ra_str, dec_str = coord.to_string('hmsdms').split() + member_uid = member_uid.replace('___', '://').replace('_', '/').replace('member.', '') + observations_ms = observations[member_uid == observations['member_ous_uid']][0] + ra, dec = observations_ms['s_ra'], observations_ms['s_dec'] - if config not in all_configs: - all_configs.append(config) - if project_id not in all_project_ids: - all_project_ids.append(project_id) - if science_goal not in all_science_goals: - all_science_goals.append(science_goal) + coord = SkyCoord(ra=ra * u.deg, dec=dec * u.deg) + ra_str, dec_str = coord.to_string('hmsdms').split() - target_ms_dict[full_dir] = {'config': config, - 'product': product, - 'project_id': project_id, - 'science_goal': science_goal, - 'ra': ra_str, - 'dec': dec_str - } + if config not in all_configs: + all_configs.append(config) + if project_id not in all_project_ids: + all_project_ids.append(project_id) + if science_goal not in all_science_goals: + all_science_goals.append(science_goal) - # Start writing these things out - mosaic_no = 1 - if split_ms == 'mosaic': + target_ms_dict[full_dir] = {'config': config, + 'product': product, + 'project_id': project_id, + 'science_goal': science_goal, + 'ra': ra_str, + 'dec': dec_str + } - # For mosaics, group up by project ID/science goal. This captures mosaics at different times and - # multiple mosaics within a single project + # Start writing these things out + mosaic_no = 1 + if split_ms == 'mosaic': - using_mosaic = False + # For mosaics, group up by project ID/science goal. 
This captures mosaics at different times and + # multiple mosaics within a single project - if len(all_project_ids) == 1 and len(all_science_goals) == 1: - target_key = copy.deepcopy(target) - else: - target_key = '%s_%d' % (target, mosaic_no) - using_mosaic = True - - for project_id in all_project_ids: - for science_goal in all_science_goals: - observation_number = {config: 1 for config in all_configs} - match_found = False - match_key = None - for key in target_ms_dict.keys(): - if target_ms_dict[key]['project_id'] == project_id and \ - target_ms_dict[key]['science_goal'] == science_goal: - match_found = True - match_key = copy.deepcopy(key) - config = target_ms_dict[key]['config'] - product = target_ms_dict[key]['product'] - ms_file.write('%s\t%s\tall\t%s\t%d\t%s\n' % - (target_key, project_id, config, observation_number[config], key)) - observation_number[config] += 1 - - # Save any singledish info - if config == 'tp': - singledish_info[target_key] = {'original_target': target, - 'product': product, - 'ms_filename': key} - - if match_found: - - ra, dec = target_ms_dict[match_key]['ra'], target_ms_dict[match_key]['dec'] - - if using_mosaic: - mosaic_info[target_key] = {} - mosaic_info[target_key]['original_target'] = target - mosaic_info[target_key]['ra'] = ra - mosaic_info[target_key]['dec'] = dec - - mosaic_no += 1 - target_key = '%s_%d' % (target, mosaic_no) - ms_file.write('\n') - - elif split_ms == 'join': - - # For join, collapse all observations for a single target down so imaging will clean everything at - # once + using_mosaic = False + if len(all_project_ids) == 1 and len(all_science_goals) == 1: target_key = copy.deepcopy(target) - observation_number = {config: 1 for config in all_configs} - for key in target_ms_dict.keys(): - config = target_ms_dict[key]['config'] - product = target_ms_dict[key]['product'] - project_id = target_ms_dict[key]['project_id'] - ms_file.write('%s\t%s\tall\t%s\t%d\t%s\n' % - (target_key, project_id, config, 
observation_number[config], key)) - observation_number[config] += 1 - - # Save any singledish info - if config == 'tp': - singledish_info[target_key] = {'original_target': target, - 'product': product, - 'ms_filename': key} - - ms_file.write('\n') - - elif split_ms == 'separate': - - # For separate, every observation will be imaged separately - + else: target_key = '%s_%d' % (target, mosaic_no) - observation_number = 1 - for key in target_ms_dict.keys(): - config = target_ms_dict[key]['config'] - product = target_ms_dict[key]['product'] - project_id = target_ms_dict[key]['project_id'] - - ms_file.write('%s\t%s\tall\t%s\t%d\t%s\n' % - (target_key, project_id, config, observation_number, key)) - ms_file.write('\n') - - ra, dec = target_ms_dict[key]['ra'], target_ms_dict[key]['dec'] + using_mosaic = True + + for project_id in all_project_ids: + for science_goal in all_science_goals: + observation_number = {config: 1 for config in all_configs} + match_found = False + match_key = None + for key in target_ms_dict.keys(): + if target_ms_dict[key]['project_id'] == project_id and \ + target_ms_dict[key]['science_goal'] == science_goal: + match_found = True + match_key = copy.deepcopy(key) + config = target_ms_dict[key]['config'] + product = target_ms_dict[key]['product'] + ms_file.write('%s\t%s\tall\t%s\t%d\t%s\n' % + (target_key, project_id, config, observation_number[config], key)) + observation_number[config] += 1 + + # Save any singledish info + if config == 'tp': + singledish_info[target_key] = {'original_target': target, + 'product': product, + 'ms_filename': key} + + if match_found: + + ra, dec = target_ms_dict[match_key]['ra'], target_ms_dict[match_key]['dec'] + + if using_mosaic: + mosaic_info[target_key] = {} + mosaic_info[target_key]['original_target'] = target + mosaic_info[target_key]['ra'] = ra + mosaic_info[target_key]['dec'] = dec + + mosaic_no += 1 + target_key = '%s_%d' % (target, mosaic_no) + ms_file.write('\n') + + elif split_ms == 'join': + + # For 
join, collapse all observations for a single target down so imaging will clean everything at + # once + + target_key = copy.deepcopy(target) + observation_number = {config: 1 for config in all_configs} + for key in target_ms_dict.keys(): + config = target_ms_dict[key]['config'] + product = target_ms_dict[key]['product'] + project_id = target_ms_dict[key]['project_id'] + ms_file.write('%s\t%s\tall\t%s\t%d\t%s\n' % + (target_key, project_id, config, observation_number[config], key)) + observation_number[config] += 1 + + # Save any singledish info + if config == 'tp': + singledish_info[target_key] = {'original_target': target, + 'product': product, + 'ms_filename': key} + + ms_file.write('\n') + + elif split_ms == 'separate': + + # For separate, every observation will be imaged separately + + target_key = '%s_%d' % (target, mosaic_no) + observation_number = 1 + for key in target_ms_dict.keys(): + config = target_ms_dict[key]['config'] + product = target_ms_dict[key]['product'] + project_id = target_ms_dict[key]['project_id'] + + ms_file.write('%s\t%s\tall\t%s\t%d\t%s\n' % + (target_key, project_id, config, observation_number, key)) + ms_file.write('\n') - mosaic_info[target_key] = {} - mosaic_info[target_key]['original_target'] = target - mosaic_info[target_key]['product'] = product - mosaic_info[target_key]['ra'] = ra - mosaic_info[target_key]['dec'] = dec + ra, dec = target_ms_dict[key]['ra'], target_ms_dict[key]['dec'] - # Save any singledish info - if config == 'tp': - singledish_info[target_key] = {'original_target': target, - 'product': product, - 'ms_filename': key} + mosaic_info[target_key] = {} + mosaic_info[target_key]['original_target'] = target + mosaic_info[target_key]['product'] = product + mosaic_info[target_key]['ra'] = ra + mosaic_info[target_key]['dec'] = dec - mosaic_no += 1 - target_key = '%s_%d' % (target, mosaic_no) + # Save any singledish info + if config == 'tp': + singledish_info[target_key] = {'original_target': target, + 'product': product, 
+ 'ms_filename': key} - os.chdir(original_dir) + mosaic_no += 1 + target_key = '%s_%d' % (target, mosaic_no) - ms_file.close() + os.chdir(original_dir) - # Write out dir key + ms_file.close() - os.system('rm -rf %s' % dir_key_file_name) - dir_file = open(dir_key_file_name, 'w+') + # Write out dir key - for key in mosaic_info.keys(): - target_name = mosaic_info[key]['original_target'] - dir_file.write('%s\t%s\n' % (key, target_name)) + os.system('rm -rf %s' % dir_key_file_name) + dir_file = open(dir_key_file_name, 'w+') - dir_file.write('\n') - dir_file.close() + for key in mosaic_info.keys(): + target_name = mosaic_info[key]['original_target'] + dir_file.write('%s\t%s\n' % (key, target_name)) - # Write out linmos keys + dir_file.write('\n') + dir_file.close() - os.system('rm -rf %s' % linmos_key_file_name) - linmos_file = open(linmos_key_file_name, 'w+') + # Write out linmos keys - for key in mosaic_info.keys(): - target_name = mosaic_info[key]['original_target'] - linmos_file.write('%s\t%s\n' % (target_name, key)) + os.system('rm -rf %s' % linmos_key_file_name) + linmos_file = open(linmos_key_file_name, 'w+') - linmos_file.write('\n') - linmos_file.close() + for key in mosaic_info.keys(): + target_name = mosaic_info[key]['original_target'] + linmos_file.write('%s\t%s\n' % (target_name, key)) - # Write out singledish keys. + linmos_file.write('\n') + linmos_file.close() - if do_tp: + # Write out singledish keys. 
- os.system('rm -rf %s' % sd_key_file_name) - sd_file = open(sd_key_file_name, 'w+') + if do_tp: - for key in singledish_info.keys(): - product = singledish_info[key]['product'] - tp_filename = '%s_%s.fits' % (key, product) - sd_file.write('%s\t%s\t%s\n' % (key, product, tp_filename)) + os.system('rm -rf %s' % sd_key_file_name) + sd_file = open(sd_key_file_name, 'w+') - sd_file.close() + for key in singledish_info.keys(): + product = singledish_info[key]['product'] + tp_filename = '%s_%s.fits' % (key, product) + sd_file.write('%s\t%s\t%s\n' % (key, product, tp_filename)) - # Write out target definitions. Start by renaming the original, so we can pull it back in if this gets rerun + sd_file.close() - os.system('rm -rf %s_pre_AlmaDownload' % target_file_name) - os.system('mv %s %s_pre_AlmaDownload' % (target_file_name, target_file_name)) + # Write out target definitions. Start by renaming the original, so we can pull it back in if this gets rerun - target_file = open(target_file_name, 'w+') + os.system('rm -rf %s_pre_AlmaDownload' % target_file_name) + os.system('mv %s %s_pre_AlmaDownload' % (target_file_name, target_file_name)) - for target in targets: + target_file = open(target_file_name, 'w+') - ra, dec = self._kh.get_phasecenter_for_target(target=target) + for target in targets: - # Pull out velocity and width - vel, vel_width = self._kh.get_system_velocity_and_velocity_width_for_target(target=target) + ra, dec = self._kh.get_phasecenter_for_target(target=target) - target_file.write('%s\t%s\t%s\t%s\t%s\n' % (target, ra, dec, vel, vel_width)) + # Pull out velocity and width + vel, vel_width = self._kh.get_system_velocity_and_velocity_width_for_target(target=target) - for target_mosaic in mosaic_info.keys(): - if mosaic_info[target_mosaic]['original_target'] == target: - ra = mosaic_info[target_mosaic]['ra'] - dec = mosaic_info[target_mosaic]['dec'] + target_file.write('%s\t%s\t%s\t%s\t%s\n' % (target, ra, dec, vel, vel_width)) - 
target_file.write('%s\t%s\t%s\t%s\t%s\n' % (target_mosaic, ra, dec, vel, vel_width)) + for target_mosaic in mosaic_info.keys(): + if mosaic_info[target_mosaic]['original_target'] == target: + ra = mosaic_info[target_mosaic]['ra'] + dec = mosaic_info[target_mosaic]['dec'] - target_file.close() + target_file.write('%s\t%s\t%s\t%s\t%s\n' % (target_mosaic, ra, dec, vel, vel_width)) -else: - class AlmaDownloadHandler(object): - ''' - Define an empty class that raises an error so the package level imports - work when astroquery and astropy are not installed. - ''' - def __init__(self, args, **kwargs): - raise ImportError("Missing at least one of these dependencies: astroquery, astropy, bs4, requests.") + target_file.close() diff --git a/phangsPipeline/handlerDerived.py b/phangsPipeline/handlerDerived.py index 7e15d2bf..3c94aece 100644 --- a/phangsPipeline/handlerDerived.py +++ b/phangsPipeline/handlerDerived.py @@ -20,450 +20,400 @@ """ -import os, sys, re, shutil -import glob import logging +import os +import astropy.units as u import numpy as np +from . import handlerTemplate +from . import utilsFilenames +from .scConvolution import smooth_cube +from .scMaskingRoutines import recipe_phangs_strict_mask, recipe_phangs_broad_mask +from .scMoments import moment_generator +from .scNoiseRoutines import recipe_phangs_noise +from .scStackingRoutines import recipe_phangs_vfield, recipe_shuffle_cube + logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed -casa_enabled = is_casa_installed() +class DerivedHandler(handlerTemplate.HandlerTemplate): + """ + Class to create signal masks based on image cubes, and then apply + the masks to make moment maps. This is done for each galaxy at + multiple spatial/angular scales. 
+ """ -if casa_enabled: - logger.debug('casa_enabled = True') -else: - logger.debug('casa_enabled = False') + ############ + # __init__ # + ############ -# import phangs pipeline stuff -from . import utilsResolutions -from . import utilsFilenames -from . import utilsLines -from . import handlerTemplate - -try: - import astropy - import spectral_cube - has_astropy_speccube = True -except ImportError: - logger.debug("astropy not installed.") - has_astropy_speccube = False - -if has_astropy_speccube: - import astropy.units as u - from astropy.io import fits - from astropy.wcs import WCS - from spectral_cube import SpectralCube, Projection - from spectral_cube.masks import BooleanArrayMask - - from .scConvolution import smooth_cube - from .scNoiseRoutines import recipe_phangs_noise - from .scMaskingRoutines import recipe_phangs_strict_mask, recipe_phangs_broad_mask, recipe_phangs_flat_mask - from .scStackingRoutines import recipe_phangs_vfield, recipe_shuffle_cube - - #from . import scDerivativeRoutines as scderiv - from .scMoments import moment_generator - - class DerivedHandler(handlerTemplate.HandlerTemplate): + def __init__( + self, + key_handler=None, + dry_run=False, + ): + # inherit template class + handlerTemplate.HandlerTemplate.__init__(self, + key_handler=key_handler, + dry_run=dry_run) + + ######################## + # Main processing loop # + ######################## + + def loop_derive_products( + self, + do_all=False, + do_convolve=False, + do_noise=False, + do_strictmask=False, + do_broadmask=False, + do_moments=False, + do_secondary=False, + do_vfield=False, + do_shuffling=False, + do_flatmask=False, + do_flatmaps=False, + make_directories=True, + extra_ext_in='', + extra_ext_out='', + overwrite=True, + ): """ - Class to create signal masks based on image cubes, and then apply - the masks to make moment maps. This is done for each galaxy at - multiple spatial/angular scales. 
+ Loops over the full set of targets, spectral products (note + the dual definition of "product" here), and configurations to + do the imaging. Toggle the parts of the loop using the do_XXX + booleans. Other choices affect algorithms used. """ + if do_all: + do_convolve = True + do_noise = True + do_strictmask = True + do_broadmask = True + do_moments = True + do_secondary = True + do_vfield = True, + do_shuffling = True + do_flatmask = True + do_flatmaps = True - ############ - # __init__ # - ############ - - def __init__( - self, - key_handler = None, - dry_run = False, - ): - # inherit template class - handlerTemplate.HandlerTemplate.__init__(self, - key_handler = key_handler, - dry_run = dry_run) - - ######################## - # Main processing loop # - ######################## - - def loop_derive_products( - self, - do_all=False, - do_convolve=False, - do_noise=False, - do_strictmask=False, - do_broadmask=False, - do_moments=False, - do_secondary=False, - do_vfield=False, - do_shuffling=False, - do_flatmask=False, - do_flatmaps=False, - make_directories=True, - extra_ext_in='', - extra_ext_out='', - overwrite=True, - ): - """ - Loops over the full set of targets, spectral products (note - the dual definition of "product" here), and configurations to - do the imaging. Toggle the parts of the loop using the do_XXX - booleans. Other choices affect algorithms used. 
- """ - - if do_all: - do_convolve = True - do_noise = True - do_strictmask = True - do_broadmask = True - do_moments = True - do_secondary = True - do_vfield=True, - do_shuffling = True - do_flatmask = True - do_flatmaps = True - - # Error checking - - if len(self.get_targets()) == 0: - logger.error("Need a target list.") - return(None) - - if len(self.get_all_products()) == 0: - logger.error("Need a products list.") - return(None) - - # If requested, make the directories - - if make_directories: - self._kh.make_missing_directories(derived = True) - - # Convolve the data to all requested angular and physical resolutions. - - if do_convolve: - - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): - - # Always start with the native resolution + # Error checking - self.task_convolve( - target=this_target, config=this_config, product=this_product, - just_copy = True, overwrite=overwrite) - - # Loop over all angular and physical resolutions. - - res_dict = self._kh.get_ang_res_dict( - config=this_config,product=this_product) - res_list = list(res_dict) - if len(res_list) > 0: - res_list.sort() - for this_res_tag in res_list: - this_res_value = res_dict[this_res_tag] - self.task_convolve( - target=this_target, config=this_config, product=this_product, - res_tag=this_res_tag,res_value=this_res_value,res_type='ang', - overwrite=overwrite) - - res_dict = self._kh.get_phys_res_dict( - config=this_config,product=this_product) - res_list = list(res_dict) - if len(res_list) > 0: - res_list.sort() - for this_res_tag in res_list: - this_res_value = res_dict[this_res_tag] - self.task_convolve( - target=this_target, config=this_config, product=this_product, - res_tag=this_res_tag,res_value=this_res_value,res_type='phys', - overwrite=overwrite) - - # Estimate the noise for each cube. 
- - if do_noise: - - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): - - # Always start with the native resolution + if len(self.get_targets()) == 0: + logger.error("Need a target list.") + return (None) - self.task_estimate_noise( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite) + if len(self.get_all_products()) == 0: + logger.error("Need a products list.") + return (None) - # Loop over all angular and physical resolutions. + # If requested, make the directories - res_dict = self._kh.get_ang_res_dict( - config=this_config,product=this_product) - res_list = list(res_dict) - if len(res_list) > 0: - res_list.sort() - for this_res_tag in res_list: + if make_directories: + self._kh.make_missing_directories(derived=True) - self.task_estimate_noise( - target=this_target, config=this_config, product=this_product, - res_tag=this_res_tag, overwrite=overwrite) + # Convolve the data to all requested angular and physical resolutions. - res_dict = self._kh.get_phys_res_dict( - config=this_config,product=this_product) - res_list = list(res_dict) - if len(res_list) > 0: - res_list.sort() - for this_res_tag in res_list: + if do_convolve: - self.task_estimate_noise( - target=this_target, config=this_config, product=this_product, - res_tag=this_res_tag, overwrite=overwrite) + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): - # Make "strict" signal masks for each cube + # Always start with the native resolution - if do_strictmask: + self.task_convolve( + target=this_target, config=this_config, product=this_product, + just_copy=True, overwrite=overwrite) - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + # Loop over all angular and physical resolutions. 
- # Always start with the native resolution + res_dict = self._kh.get_ang_res_dict( + config=this_config, product=this_product) + res_list = list(res_dict) + if len(res_list) > 0: + res_list.sort() + for this_res_tag in res_list: + this_res_value = res_dict[this_res_tag] + self.task_convolve( + target=this_target, config=this_config, product=this_product, + res_tag=this_res_tag, res_value=this_res_value, res_type='ang', + overwrite=overwrite) - self.task_build_strict_mask( + res_dict = self._kh.get_phys_res_dict( + config=this_config, product=this_product) + res_list = list(res_dict) + if len(res_list) > 0: + res_list.sort() + for this_res_tag in res_list: + this_res_value = res_dict[this_res_tag] + self.task_convolve( target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=None) + res_tag=this_res_tag, res_value=this_res_value, res_type='phys', + overwrite=overwrite) + + # Estimate the noise for each cube. + + if do_noise: + + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): + + # Always start with the native resolution - # Loop over all angular and physical resolutions. + self.task_estimate_noise( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite) - for this_res in self._kh.get_ang_res_dict( - config=this_config,product=this_product): + # Loop over all angular and physical resolutions. 
- self.task_build_strict_mask( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=this_res) + res_dict = self._kh.get_ang_res_dict( + config=this_config, product=this_product) + res_list = list(res_dict) + if len(res_list) > 0: + res_list.sort() + for this_res_tag in res_list: + self.task_estimate_noise( + target=this_target, config=this_config, product=this_product, + res_tag=this_res_tag, overwrite=overwrite) + + res_dict = self._kh.get_phys_res_dict( + config=this_config, product=this_product) + res_list = list(res_dict) + if len(res_list) > 0: + res_list.sort() + for this_res_tag in res_list: + self.task_estimate_noise( + target=this_target, config=this_config, product=this_product, + res_tag=this_res_tag, overwrite=overwrite) - for this_res in self._kh.get_phys_res_dict( - config=this_config,product=this_product): + # Make "strict" signal masks for each cube - self.task_build_strict_mask( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=this_res) + if do_strictmask: - # Make "broad" combination masks. + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): - if do_broadmask: + # Always start with the native resolution - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + self.task_build_strict_mask( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=None) - # Only build one broad mask that covers all resolutions + # Loop over all angular and physical resolutions. - self.task_build_broad_mask( + for this_res in self._kh.get_ang_res_dict( + config=this_config, product=this_product): + self.task_build_strict_mask( target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=None) + overwrite=overwrite, res_tag=this_res) - # Make "moments" - derived data products. 
+ for this_res in self._kh.get_phys_res_dict( + config=this_config, product=this_product): + self.task_build_strict_mask( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=this_res) - if do_moments: + # Make "broad" combination masks. - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + if do_broadmask: - # Always start with the native resolution + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): + # Only build one broad mask that covers all resolutions - self.task_generate_moments( - target=this_target, product=this_product, config=this_config, - res_tag=None, overwrite=overwrite) + self.task_build_broad_mask( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=None) - # Loop over all angular and physical resolutions. + # Make "moments" - derived data products. - for this_res in self._kh.get_ang_res_dict( - config=this_config,product=this_product): + if do_moments: - self.task_generate_moments( - target=this_target, product=this_product, config=this_config, - res_tag=this_res, overwrite=overwrite) + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): - for this_res in self._kh.get_phys_res_dict( - config=this_config,product=this_product): + # Always start with the native resolution - self.task_generate_moments( - target=this_target, product=this_product, config=this_config, - res_tag=this_res, overwrite=overwrite) + self.task_generate_moments( + target=this_target, product=this_product, config=this_config, + res_tag=None, overwrite=overwrite) - if do_secondary: - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + # Loop over all angular and physical resolutions. - # Loop over all angular and physical resolutions. 
+ for this_res in self._kh.get_ang_res_dict( + config=this_config, product=this_product): + self.task_generate_moments( + target=this_target, product=this_product, config=this_config, + res_tag=this_res, overwrite=overwrite) - self.task_generate_secondary_moments( + for this_res in self._kh.get_phys_res_dict( + config=this_config, product=this_product): + self.task_generate_moments( target=this_target, product=this_product, config=this_config, - res_tag=None, overwrite=overwrite) + res_tag=this_res, overwrite=overwrite) - for this_res in self._kh.get_ang_res_dict( - config=this_config,product=this_product): + if do_secondary: + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): - self.task_generate_secondary_moments( - target=this_target, product=this_product, config=this_config, - res_tag=this_res, overwrite=overwrite) + # Loop over all angular and physical resolutions. - for this_res in self._kh.get_phys_res_dict( - config=this_config,product=this_product): + self.task_generate_secondary_moments( + target=this_target, product=this_product, config=this_config, + res_tag=None, overwrite=overwrite) - self.task_generate_secondary_moments( - target=this_target, product=this_product, config=this_config, - res_tag=this_res, overwrite=overwrite) + for this_res in self._kh.get_ang_res_dict( + config=this_config, product=this_product): + self.task_generate_secondary_moments( + target=this_target, product=this_product, config=this_config, + res_tag=this_res, overwrite=overwrite) - # Create velocity field. + for this_res in self._kh.get_phys_res_dict( + config=this_config, product=this_product): + self.task_generate_secondary_moments( + target=this_target, product=this_product, config=this_config, + res_tag=this_res, overwrite=overwrite) - if do_vfield: - - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + # Create velocity field. 
- # Only build one velocity field that covers all resolutions + if do_vfield: - self.task_build_vfield( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=None) + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): + # Only build one velocity field that covers all resolutions + self.task_build_vfield( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=None) - # Make shuffled cube. + # Make shuffled cube. - if do_shuffling: + if do_shuffling: - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): + # Always start with the native resolution - # Always start with the native resolution + self.task_shuffle_cube( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite) - self.task_shuffle_cube( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite) + # Loop over all angular and physical resolutions. + # N.B. we only want native resolution shuffled cubes; + # feel free to uncomment the lines below for other resolutions - # Loop over all angular and physical resolutions. - # N.B. 
we only want native resolution shuffled cubes; - # feel free to uncomment the lines below for other resolutions + # res_dict = self._kh.get_ang_res_dict( + # config=this_config,product=this_product) + # res_list = list(res_dict) + # if len(res_list) > 0: + # res_list.sort() + # for this_res_tag in res_list: - # res_dict = self._kh.get_ang_res_dict( - # config=this_config,product=this_product) - # res_list = list(res_dict) - # if len(res_list) > 0: - # res_list.sort() - # for this_res_tag in res_list: + # self.task_shuffle_cube( + # target=this_target, config=this_config, product=this_product, + # res_tag=this_res_tag, overwrite=overwrite) - # self.task_shuffle_cube( - # target=this_target, config=this_config, product=this_product, - # res_tag=this_res_tag, overwrite=overwrite) + # res_dict = self._kh.get_phys_res_dict( + # config=this_config,product=this_product) + # res_list = list(res_dict) + # if len(res_list) > 0: + # res_list.sort() + # for this_res_tag in res_list: - # res_dict = self._kh.get_phys_res_dict( - # config=this_config,product=this_product) - # res_list = list(res_dict) - # if len(res_list) > 0: - # res_list.sort() - # for this_res_tag in res_list: + # self.task_shuffle_cube( + # target=this_target, config=this_config, product=this_product, + # res_tag=this_res_tag, overwrite=overwrite) - # self.task_shuffle_cube( - # target=this_target, config=this_config, product=this_product, - # res_tag=this_res_tag, overwrite=overwrite) + # Make "flat masks" for each cube - # Make "flat masks" for each cube + if do_flatmask: - if do_flatmask: + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + # + # Flat Strict Mask - # - # Flat Strict Mask + # Always start with the native resolution - # Always start with the native resolution + self.task_build_flat_strict_mask( + 
target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=None) + # Loop over all angular and physical resolutions. + + for this_res in self._kh.get_ang_res_dict( + config=this_config, product=this_product): self.task_build_flat_strict_mask( target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=None) + overwrite=overwrite, res_tag=this_res) - # Loop over all angular and physical resolutions. - - for this_res in self._kh.get_ang_res_dict( - config=this_config,product=this_product): + for this_res in self._kh.get_phys_res_dict( + config=this_config, product=this_product): + self.task_build_flat_strict_mask( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=this_res) - self.task_build_flat_strict_mask( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=this_res) + # + # Flat Broad Mask - for this_res in self._kh.get_phys_res_dict( - config=this_config,product=this_product): + # Always start with the native resolution - self.task_build_flat_strict_mask( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=this_res) - - # - # Flat Broad Mask + self.task_build_flat_broad_mask( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=None) - # Always start with the native resolution + # Loop over all angular and physical resolutions. + for this_res in self._kh.get_ang_res_dict( + config=this_config, product=this_product): self.task_build_flat_broad_mask( target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=None) - - # Loop over all angular and physical resolutions. 
+ overwrite=overwrite, res_tag=this_res) - for this_res in self._kh.get_ang_res_dict( - config=this_config,product=this_product): - - self.task_build_flat_broad_mask( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=this_res) + for this_res in self._kh.get_phys_res_dict( + config=this_config, product=this_product): + self.task_build_flat_broad_mask( + target=this_target, config=this_config, product=this_product, + overwrite=overwrite, res_tag=this_res) - for this_res in self._kh.get_phys_res_dict( - config=this_config,product=this_product): + # Make "flat maps" - derived data products. - self.task_build_flat_broad_mask( - target=this_target, config=this_config, product=this_product, - overwrite=overwrite, res_tag=this_res) + if do_flatmaps: - # Make "flat maps" - derived data products. + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): - if do_flatmaps: + # Always start with the native resolution - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + self.task_generate_flatmaps( + target=this_target, product=this_product, config=this_config, + res_tag=None, overwrite=overwrite) - # Always start with the native resolution + # Loop over all angular and physical resolutions. + for this_res in self._kh.get_ang_res_dict( + config=this_config, product=this_product): self.task_generate_flatmaps( target=this_target, product=this_product, config=this_config, - res_tag=None, overwrite=overwrite) - - # Loop over all angular and physical resolutions. 
- - for this_res in self._kh.get_ang_res_dict( - config=this_config,product=this_product): - - self.task_generate_flatmaps( - target=this_target, product=this_product, config=this_config, - res_tag=this_res, overwrite=overwrite) + res_tag=this_res, overwrite=overwrite) - for this_res in self._kh.get_phys_res_dict( - config=this_config,product=this_product): + for this_res in self._kh.get_phys_res_dict( + config=this_config, product=this_product): + self.task_generate_flatmaps( + target=this_target, product=this_product, config=this_config, + res_tag=this_res, overwrite=overwrite) - self.task_generate_flatmaps( - target=this_target, product=this_product, config=this_config, - res_tag=this_res, overwrite=overwrite) - # end of loop + ########################################### + # Defined file names for various products # + ########################################### - ########################################### - # Defined file names for various products # - ########################################### - - def _fname_dict( + def _fname_dict( self, target=None, config=None, @@ -471,706 +421,882 @@ def _fname_dict( res_tag=None, extra_ext_in='', extra_ext_out='', - ): - """ - Function to define file names used in other functions. - """ + ): + """ + Function to define file names used in other functions. 
+ """ - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Error checking - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Error checking + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - if target is None: - raise Exception("Need a target.") - if product is None: - raise Exception("Need a product.") - if config is None: - raise Exception("Need a config.") + if target is None: + raise Exception("Need a target.") + if product is None: + raise Exception("Need a product.") + if config is None: + raise Exception("Need a config.") - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Initialize - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Initialize + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # The output is a nested dictionary structure, for each cube - # resolution (res_tag) + # The output is a nested dictionary structure, for each cube + # resolution (res_tag) - fname_dict = {} + fname_dict = {} - # Resolution string (if any, not required) - fname_dict['res_tag'] = res_tag - if res_tag is None: - res_tag = '' + # Resolution string (if any, not required) + fname_dict['res_tag'] = res_tag + if res_tag is None: + res_tag = '' - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Original files - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Original files + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - orig_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'pbcorr_trimmed_k'+extra_ext_in, - casa = False) + orig_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='pbcorr_trimmed_k' + extra_ext_in, + casa=False) - fname_dict['orig'] = orig_filename + fname_dict['orig'] = orig_filename - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Output Convolved Cubes - # 
&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Output Convolved Cubes + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - cube_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = res_tag+extra_ext_out, - casa = False) + cube_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + extra_ext_out, + casa=False) - coverage_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = res_tag+'_coverage'+extra_ext_out, - casa = False) + coverage_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + '_coverage' + extra_ext_out, + casa=False) - coverage2d_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = res_tag+'_coverage2d'+extra_ext_out, - casa = False) + coverage2d_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + '_coverage2d' + extra_ext_out, + casa=False) - fname_dict['cube'] = cube_filename - fname_dict['coverage'] = coverage_filename - fname_dict['coverage2d'] = coverage2d_filename + fname_dict['cube'] = cube_filename + fname_dict['coverage'] = coverage_filename + fname_dict['coverage2d'] = coverage2d_filename - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Velocity field - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Velocity field + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - vfield_filename = self._kh.get_vfield_file_for_target(target=target) - - fname_dict['vfield'] = vfield_filename + vfield_filename = self._kh.get_vfield_file_for_target(target=target) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Shuffled Cubes - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['vfield'] = vfield_filename - 
shuffled_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = res_tag+extra_ext_out+'_shuffled', - casa = False) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Shuffled Cubes + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - fname_dict['shuffled'] = shuffled_filename + shuffled_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + extra_ext_out + '_shuffled', + casa=False) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Noise Cubes - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['shuffled'] = shuffled_filename - noise_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = res_tag+extra_ext_out+'_noise', - casa = False) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Noise Cubes + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - fname_dict['noise'] = noise_filename + noise_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + extra_ext_out + '_noise', + casa=False) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Signal Mask - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['noise'] = noise_filename - # This differs by resolution + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Signal Mask + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - strictmask_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = res_tag+extra_ext_out+'_strictmask', - casa = False) + # This differs by resolution - fname_dict['strictmask'] = strictmask_filename + strictmask_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + extra_ext_out + '_strictmask', + casa=False) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Broad / Hybrid Mask - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['strictmask'] = 
strictmask_filename - # Note that the broadmask is the same across all resolutions. + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Broad / Hybrid Mask + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - broadmask_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = extra_ext_out+'_broadmask', - casa = False) + # Note that the broadmask is the same across all resolutions. - fname_dict['broadmask'] = broadmask_filename + broadmask_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=extra_ext_out + '_broadmask', + casa=False) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Flat Strict Mask - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['broadmask'] = broadmask_filename - flatstrictmask_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = extra_ext_out+'_flatstrictmask', - casa = False) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Flat Strict Mask + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - fname_dict['flatstrictmask'] = flatstrictmask_filename + flatstrictmask_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=extra_ext_out + '_flatstrictmask', + casa=False) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Flat Broad Mask - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['flatstrictmask'] = flatstrictmask_filename - flatbroadmask_filename = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = extra_ext_out+'_flatbroadmask', - casa = False) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Flat Broad Mask + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - fname_dict['flatbroadmask'] = flatbroadmask_filename + flatbroadmask_filename = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=extra_ext_out + '_flatbroadmask', + 
casa=False) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Moments - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + fname_dict['flatbroadmask'] = flatbroadmask_filename - # Just note the root of the moment file name. There are a lot - # of extensions that will be filled in by the programs. + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Moments + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - moment_root = utilsFilenames.get_cube_filename( - target=target, config=config, product=product, - ext=res_tag+extra_ext_out) - moment_root = moment_root.replace('.fits','') - fname_dict['momentroot'] = moment_root + # Just note the root of the moment file name. There are a lot + # of extensions that will be filled in by the programs. - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Return - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + moment_root = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=res_tag + extra_ext_out) + moment_root = moment_root.replace('.fits', '') + fname_dict['momentroot'] = moment_root - return(fname_dict) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Return + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - ################################################################## - # Tasks - discrete steps on target, product, config combinations # - ################################################################## + return (fname_dict) - # region convolve + ################################################################## + # Tasks - discrete steps on target, product, config combinations # + ################################################################## - def task_convolve( + # region convolve + + def task_convolve( self, - target = None, - config = None, - product = None, - res_tag = None, - res_value = None, - res_type = 'ang', - just_copy = False, - extra_ext_in = '', - extra_ext_out = '', - overwrite = False, + target=None, + config=None, + product=None, + res_tag=None, + 
res_value=None, + res_type='ang', + just_copy=False, + extra_ext_in='', + extra_ext_out='', + overwrite=False, tol=0.1, nan_treatment='interpolate', - ): - """ - Convolve data to lower resolutions. Defaults to copying in some cases. - """ + ): + """ + Convolve data to lower resolutions. Defaults to copying in some cases. + """ - # Parse the input resolution - if not just_copy: + # Parse the input resolution + if not just_copy: - if res_value is None: - logger.warning("Need an input resolution.") - logger.warning("Defaulting to copy mode.") - just_copy = True + if res_value is None: + logger.warning("Need an input resolution.") + logger.warning("Defaulting to copy mode.") + just_copy = True - if res_type.lower() not in ['ang','phys']: - logger.warning("Input resolution can be angular or physical, ang or phys .") - logger.warning("Defaulting to copy mode.") - just_copy = True + if res_type.lower() not in ['ang', 'phys']: + logger.warning("Input resolution can be angular or physical, ang or phys .") + logger.warning("Defaulting to copy mode.") + just_copy = True - if res_tag is None: - logger.warning("Need a resolution tag to avoid overlapping file names.") - logger.warning("Defaulting to copy mode.") - just_copy = True + if res_tag is None: + logger.warning("Need a resolution tag to avoid overlapping file names.") + logger.warning("Defaulting to copy mode.") + just_copy = True + + # Generate file names - # Generate file names + indir = self._kh.get_postprocess_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - indir = self._kh.get_postprocess_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + fname_dict_in = self._fname_dict( + target=target, config=config, 
product=product, res_tag=None, + extra_ext_in=extra_ext_in) - fname_dict_in = self._fname_dict( + if just_copy: + fname_dict_out = self._fname_dict( target=target, config=config, product=product, res_tag=None, - extra_ext_in=extra_ext_in) + extra_ext_out=extra_ext_out) + else: + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_out=extra_ext_out) + + input_file = fname_dict_in['orig'] + outfile = fname_dict_out['cube'] + coveragefile = fname_dict_out['coverage'] + coverage2dfile = fname_dict_out['coverage2d'] + + # Check input file existence + + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing " + indir + input_file) + return () + + # Access keywords for mask generation + + convolve_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='convolve_kw' + ) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Copying or convolving cube for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + if just_copy: + logger.info("... mode is just copying.") + else: + logger.info("... mode is convolving.") + logger.info("... to resolution tag: " + res_tag) + logger.info("... which is resolution type: " + res_type) + logger.info("... 
and value: " + str(res_value)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("Input file: " + input_file) + logger.info("Target file: " + outfile) + logger.info("Coverage file: " + coveragefile) + logger.info("Keyword arguments: " + str(convolve_kwargs)) + + if (not self._dry_run): if just_copy: - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, res_tag=None, - extra_ext_out=extra_ext_out) + + if overwrite: + os.system('rm -rf ' + outdir + outfile) + + if (os.path.isfile(outdir + outfile)): + logger.warning("Target file already present " + outdir + outfile) + return () + + os.system('cp -r ' + indir + input_file + ' ' + outdir + outfile) + else: - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_out=extra_ext_out) - input_file = fname_dict_in['orig'] - outfile = fname_dict_out['cube'] - coveragefile = fname_dict_out['coverage'] - coverage2dfile = fname_dict_out['coverage2d'] + if 'tol' in convolve_kwargs: + tol = convolve_kwargs['tol'] + + if 'nan_treatment' in convolve_kwargs: + nan_treatment = convolve_kwargs['nan_treatment'] + + if res_type == 'ang': + input_res_value = res_value * u.arcsec + smooth_cube(incube=indir + input_file, outfile=outdir + outfile, + angular_resolution=input_res_value, + tol=tol, nan_treatment=nan_treatment, + make_coverage_cube=True, coveragefile=outdir + coveragefile, + collapse_coverage=True, coverage2dfile=outdir + coverage2dfile, + overwrite=overwrite) + + if res_type == 'phys': + this_distance = self._kh.get_distance_for_target(target) + if this_distance is None: + logger.error("No distance for target " + target) + return () + this_distance = this_distance * 1e6 * u.pc + input_res_value = res_value * u.pc + smooth_cube(incube=indir + input_file, outfile=outdir + outfile, + linear_resolution=input_res_value, distance=this_distance, + tol=tol, nan_treatment=nan_treatment, + 
make_coverage_cube=True, coveragefile=outdir + coveragefile, + collapse_coverage=True, + overwrite=overwrite) + + return () + + # endregion + + # region noise + + def task_estimate_noise( + self, + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Estimate the noise associated with a data cube and save it to disk. + """ + + # Generate file names - # Check input file existence + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing "+indir+input_file) - return() + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # Access keywords for mask generation + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - convolve_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='convolve_kw' - ) + input_file = fname_dict['cube'] + outfile = fname_dict['noise'] - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Copying or convolving cube for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - if just_copy: - logger.info("... mode is just copying.") - else: - logger.info("... mode is convolving.") - logger.info("... to resolution tag: "+res_tag) - logger.info("... which is resolution type: "+res_type) - logger.info("... 
and value: "+str(res_value)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + # Check input file existence - logger.info("Input file: "+input_file) - logger.info("Target file: "+outfile) - logger.info("Coverage file: "+coveragefile) - logger.info("Keyword arguments: "+str(convolve_kwargs)) + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing " + indir + input_file) + return () - if (not self._dry_run): + # Access keywords for noise generation - if just_copy: + noise_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='noise_kw') - if overwrite: - os.system('rm -rf '+outdir+outfile) + # Report - if (os.path.isfile(outdir+outfile)): - logger.warning("Target file already present "+outdir+outfile) - return() + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Running a noise estimate for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - os.system('cp -r '+indir+input_file+' '+outdir+outfile) + logger.info("Input file: " + input_file) + logger.info("Target file: " + outfile) + logger.info("Keyword arguments: " + str(noise_kwargs)) - else: + # Call noise routines + + if (not self._dry_run): + recipe_phangs_noise( + incube=indir + input_file, + outfile=outdir + outfile, + noise_kwargs=noise_kwargs, + return_spectral_cube=False, + overwrite=overwrite) + + # endregion - if 'tol' in convolve_kwargs: - tol = convolve_kwargs['tol'] - - if 'nan_treatment' in convolve_kwargs: - nan_treatment = convolve_kwargs['nan_treatment'] - - if res_type == 'ang': - input_res_value = res_value*u.arcsec - smooth_cube(incube=indir+input_file, outfile=outdir+outfile, - angular_resolution=input_res_value, - tol=tol, nan_treatment=nan_treatment, - make_coverage_cube=True, coveragefile=outdir+coveragefile, - collapse_coverage=True, coverage2dfile=outdir+coverage2dfile, - overwrite=overwrite) - 
- if res_type == 'phys': - this_distance = self._kh.get_distance_for_target(target) - if this_distance is None: - logger.error("No distance for target "+target) - return() - this_distance = this_distance*1e6*u.pc - input_res_value = res_value*u.pc - smooth_cube(incube=indir+input_file, outfile=outdir+outfile, - linear_resolution=input_res_value, distance=this_distance, - tol=tol, nan_treatment=nan_treatment, - make_coverage_cube=True, coveragefile=outdir+coveragefile, - collapse_coverage=True, - overwrite=overwrite) - - return() - # endregion - - # region noise - - def task_estimate_noise( + # region strict mask + + def task_build_strict_mask( self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Estimate the noise associated with a data cube and save it to disk. - """ + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Estimate the noise associated with a data cube and save it to disk. 
+ """ - # Generate file names + # Generate file names - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - input_file = fname_dict['cube'] - outfile = fname_dict['noise'] + input_file = fname_dict['cube'] + noise_file = fname_dict['noise'] + coverage_file = fname_dict['coverage'] + coverage2d_file = fname_dict['coverage2d'] - # Check input file existence + outfile = fname_dict['strictmask'] - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing "+indir+input_file) - return() + # Check input file existence - # Access keywords for noise generation + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - noise_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='noise_kw') + if not (os.path.isfile(indir + noise_file)): + logger.warning("Missing noise estimate: " + indir + noise_file) + return () - # Report + # Coverage - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Running a noise estimate for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + if not (os.path.isfile(indir + coverage_file)): + logger.warning("Missing coverage estimate: " + indir + coverage_file) + logger.warning("This 
may be fine. Proceeding.") + coverage_file = None - logger.info("Input file: "+input_file) - logger.info("Target file: "+outfile) - logger.info("Keyword arguments: "+str(noise_kwargs)) + # Access keywords for mask generation - # Call noise routines + strictmask_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='strictmask_kw' + ) - if (not self._dry_run): + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - recipe_phangs_noise( - incube=indir+input_file, - outfile=outdir+outfile, - noise_kwargs=noise_kwargs, - return_spectral_cube=False, - overwrite=overwrite) - # endregion + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Creating a strict mask for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("Input file: " + input_file) + logger.info("Noise file: " + noise_file) + if coverage_file is not None: + logger.info("Coverage file: " + coverage_file) + logger.info("Target file: " + outfile) + logger.info("Keyword arguments: " + str(strictmask_kwargs)) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Call the masking routines + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + if (not self._dry_run): + + # ... put the directory into the name to allow it to remain + # None when missing. 
+ if coverage_file is not None: + coverage_file_in = indir + coverage_file + else: + coverage_file_in = None + + recipe_phangs_strict_mask( + indir + input_file, + innoise=indir + noise_file, + coverage=coverage_file_in, + outfile=outdir + outfile, + mask_kwargs=strictmask_kwargs, + return_spectral_cube=False, + overwrite=overwrite) + + # endregion - # region strict mask + # region broad mask - def task_build_strict_mask( + def task_build_broad_mask( self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Estimate the noise associated with a data cube and save it to disk. - """ + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Build a broad mask for a data cube and save it to disk. + """ - # Generate file names + # Generate file names - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - input_file = fname_dict['cube'] - noise_file = fname_dict['noise'] - coverage_file = fname_dict['coverage'] - coverage2d_file = fname_dict['coverage2d'] + input_file = fname_dict['strictmask'] + outfile = fname_dict['broadmask'] - outfile = fname_dict['strictmask'] + # Check input file existence - # Check input file existence + if not (os.path.isfile(indir + input_file)): + 
logger.warning("Missing cube: " + indir + input_file) + return () - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + # Access keywords for mask generation - if not (os.path.isfile(indir+noise_file)): - logger.warning("Missing noise estimate: "+indir+noise_file) - return() + broadmask_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='broadmask_kw' + ) - # Coverage + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Create the list of masks to combine + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - if not (os.path.isfile(indir+coverage_file)): - logger.warning("Missing coverage estimate: "+indir+coverage_file) - logger.warning("This may be fine. Proceeding.") - coverage_file = None + list_of_masks = [] - # Access keywords for mask generation + linked_configs = self._kh.get_linked_mask_configs( + config=config, product=product) - strictmask_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='strictmask_kw' - ) + if config not in linked_configs: + linked_configs.append(config) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + for cross_config in linked_configs: - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Creating a strict mask for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + fname_dict = self._fname_dict( + target=target, config=cross_config, product=product, res_tag=None, + extra_ext_in=extra_ext) - logger.info("Input file: "+input_file) - logger.info("Noise file: "+noise_file) - if coverage_file is not None: - logger.info("Coverage file: "+coverage_file) - logger.info("Target file: "+outfile) - logger.info("Keyword arguments: "+str(strictmask_kwargs)) + this_mask = fname_dict['strictmask'] + if 
this_mask not in list_of_masks: + if os.path.isfile(indir + this_mask): + list_of_masks.append(indir + this_mask) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Call the masking routines - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Loop over all angular and physical resolutions. - if (not self._dry_run): + for this_res in self._kh.get_ang_res_dict( + config=cross_config, product=product): - # ... put the directory into the name to allow it to remain - # None when missing. - if coverage_file is not None: - coverage_file_in = indir+coverage_file - else: - coverage_file_in = None - - recipe_phangs_strict_mask( - indir+input_file, - indir+noise_file, - coverage=coverage_file_in, - outfile=outdir+outfile, - mask_kwargs=strictmask_kwargs, - return_spectral_cube=False, - overwrite=overwrite) - # endregion + fname_dict = self._fname_dict( + target=target, config=cross_config, product=product, res_tag=this_res, + extra_ext_in=extra_ext) + + this_mask = fname_dict['strictmask'] + if this_mask not in list_of_masks: + if os.path.isfile(indir + this_mask): + list_of_masks.append(indir + this_mask) - # region broad mask + for this_res in self._kh.get_phys_res_dict( + config=cross_config, product=product): - def task_build_broad_mask( + fname_dict = self._fname_dict( + target=target, config=cross_config, product=product, res_tag=this_res, + extra_ext_in=extra_ext) + + this_mask = fname_dict['strictmask'] + if this_mask not in list_of_masks: + if os.path.isfile(indir + this_mask): + list_of_masks.append(indir + this_mask) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Creating a broad mask for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("Input file: " + 
input_file) + logger.info("List of other masks " + str(list_of_masks)) + logger.info("Target file: " + outfile) + logger.info("Keyword arguments: " + str(broadmask_kwargs)) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Call the mask combining routine + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + if (not self._dry_run): + recipe_phangs_broad_mask( + indir + input_file, + list_of_masks=list_of_masks, + outfile=outdir + outfile, + # mask_kwargs=broadmask_kwargs, + # return_spectral_cube=False, + overwrite=overwrite) + + # endregion + + # region moments + + def task_generate_moments( self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Estimate the noise associated with a data cube and save it to disk. - """ + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Generate moment maps. + """ + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Look up filenames, list of moments, etc. 
+ # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Generate file names + # Generate file names - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + # Filenames - input_file = fname_dict['strictmask'] - outfile = fname_dict['broadmask'] + fname_dict_nores = self._fname_dict( + target=target, config=config, product=product, res_tag=None, + extra_ext_in=extra_ext) - # Check input file existence + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + # ... broad mask never has a resolution tag - # Access keywords for mask generation + broadmask_file = fname_dict_nores['broadmask'] - broadmask_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='broadmask_kw' - ) + # ... 
files with resolution tag - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Create the list of masks to combine - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + input_file = fname_dict['cube'] + noise_file = fname_dict['noise'] + strictmask_file = fname_dict['strictmask'] - list_of_masks = [] + outroot = fname_dict['momentroot'] - linked_configs = self._kh.get_linked_mask_configs( - config=config, product=product) + # Check input file and mask existence - if config not in linked_configs: - linked_configs.append(config) + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - for cross_config in linked_configs: + found_broadmask = (os.path.isfile(indir + broadmask_file)) + found_strictmask = (os.path.isfile(indir + strictmask_file)) - fname_dict = self._fname_dict( - target=target, config=cross_config, product=product, res_tag=None, - extra_ext_in=extra_ext) + # Look up which moments to calculate - this_mask = fname_dict['strictmask'] - if this_mask not in list_of_masks: - if os.path.isfile(indir+this_mask): - list_of_masks.append(indir+this_mask) + list_of_moments = self._kh.get_moment_list(config=config, product=product) - # Loop over all angular and physical resolutions. + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - for this_res in self._kh.get_ang_res_dict( - config=cross_config,product=product): + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Generating moment maps for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + if res_tag is not None: + logger.info("Resolution " + str(res_tag)) + logger.info("Found a strict mask? " + str(found_strictmask)) + logger.info("Found a broad mask? 
" + str(found_broadmask)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - fname_dict = self._fname_dict( - target=target, config=cross_config, product=product, res_tag=this_res, - extra_ext_in=extra_ext) + logger.info("... input file: " + input_file) + logger.info("... noise file: " + noise_file) + logger.info("... strict mask file: " + strictmask_file) + logger.info("... broad mask file: " + broadmask_file) + logger.info("... list of moments: " + str(list_of_moments)) + logger.info("... output root: " + outroot) - this_mask = fname_dict['strictmask'] - if this_mask not in list_of_masks: - if os.path.isfile(indir+this_mask): - list_of_masks.append(indir+this_mask) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Execute + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - for this_res in self._kh.get_phys_res_dict( - config=cross_config,product=product): + rounds = [] + for this_mom in list_of_moments: + rounds.append(self._kh.get_params_for_moment(this_mom)['round']) - fname_dict = self._fname_dict( - target=target, config=cross_config, product=product, res_tag=this_res, - extra_ext_in=extra_ext) + uniqrounds = sorted(list(set(rounds))) - this_mask = fname_dict['strictmask'] - if this_mask not in list_of_masks: - if os.path.isfile(indir+this_mask): - list_of_masks.append(indir+this_mask) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) + logger.info("... 
First, calculate all moments in stage {0}".format(np.min(uniqrounds))) + logger.info("") - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Creating a broad mask for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + sublist_of_moments = [this_mom for this_mom in list_of_moments + if self._kh.get_params_for_moment(this_mom)['round'] == uniqrounds[0]] - logger.info("Input file: "+input_file) - logger.info("List of other masks "+str(list_of_masks)) - logger.info("Target file: "+outfile) - logger.info("Keyword arguments: "+str(broadmask_kwargs)) + if (not self._dry_run): + for this_mom in sublist_of_moments: + logger.info('... generating moment: ' + str(this_mom)) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Call the mask combining routine - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + mom_params = self._kh.get_params_for_moment(this_mom) - if (not self._dry_run): + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Look up mask + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - recipe_phangs_broad_mask( - indir+input_file, - list_of_masks=list_of_masks, - outfile=outdir+outfile, - #mask_kwargs=broadmask_kwargs, - #return_spectral_cube=False, - overwrite=overwrite) - # endregion + if mom_params['mask'] is None: + mask_file = None + elif mom_params['mask'].strip().lower() == 'none': + mask_file = None + elif mom_params['mask'] == 'strictmask': + if not found_strictmask: + logger.warning("Strict mask needed but not found. Skipping.") + continue + mask_file = indir + strictmask_file + elif mom_params['mask'] == 'broadmask': + if not found_broadmask: + logger.warning("Broad mask needed but not found. 
Skipping.") + continue + mask_file = indir + broadmask_file + else: + logger.warning("Mask choice not recognized for moment: " + str(this_mom)) + logger.warning("Skipping.") + continue + + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Check noise + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + if not (os.path.isfile(indir + noise_file)): + logger.warning("Missing noise: " + indir + noise_file) + noise_in = None + errorfile = None + else: + noise_in = indir + noise_file + errorfile = outdir + outroot + mom_params['ext_error'] + '.fits' + + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Set up output file + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + outfile = outdir + outroot + mom_params['ext'] + '.fits' + + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # In the first round, just call the moment generator + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + moment_generator( + indir + input_file, mask=mask_file, noise=noise_in, + moment=mom_params['algorithm'], momkwargs=mom_params['kwargs'], + outfile=outfile, errorfile=errorfile, + channel_correlation=None) - # region moments - - def task_generate_moments( + # endregion + + # region secondary moments + + def task_generate_secondary_moments( self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Generate moment maps. - """ - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Look up filenames, list of moments, etc. 
- # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - # Generate file names - - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' - - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' - - # Filenames - - fname_dict_nores = self._fname_dict( - target=target, config=config, product=product, res_tag=None, - extra_ext_in=extra_ext) + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Generate secondary moment maps. + """ + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Look up filenames, list of moments, etc. + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + # Generate file names - # ... broad mask never has a resolution tag + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - broadmask_file = fname_dict_nores['broadmask'] + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # ... files with resolution tag + # Filenames - input_file = fname_dict['cube'] - noise_file = fname_dict['noise'] - strictmask_file = fname_dict['strictmask'] + fname_dict_nores = self._fname_dict( + target=target, config=config, product=product, res_tag=None, + extra_ext_in=extra_ext) - outroot = fname_dict['momentroot'] + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - # Check input file and mask existence + # ... 
broad mask never has a resolution tag - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + broadmask_file = fname_dict_nores['broadmask'] - found_broadmask = (os.path.isfile(indir+broadmask_file)) - found_strictmask = (os.path.isfile(indir+strictmask_file)) + # ... files with resolution tag - # Look up which moments to calculate + input_file = fname_dict['cube'] + noise_file = fname_dict['noise'] + strictmask_file = fname_dict['strictmask'] - list_of_moments = self._kh.get_moment_list(config=config, product=product) + outroot = fname_dict['momentroot'] - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Check input file and mask existence - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Generating moment maps for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - if res_tag is not None: - logger.info("Resolution "+str(res_tag)) - logger.info("Found a strict mask? "+str(found_strictmask)) - logger.info("Found a broad mask? "+str(found_broadmask)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - logger.info("... input file: "+input_file) - logger.info("... noise file: "+noise_file) - logger.info("... strict mask file: "+strictmask_file) - logger.info("... broad mask file: "+broadmask_file) - logger.info("... list of moments: "+str(list_of_moments)) - logger.info("... 
output root: "+outroot) + found_broadmask = (os.path.isfile(indir + broadmask_file)) + found_strictmask = (os.path.isfile(indir + strictmask_file)) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Execute - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Look up which moments to calculate - rounds = [] - for this_mom in list_of_moments: - rounds.append(self._kh.get_params_for_moment(this_mom)['round']) + list_of_moments = self._kh.get_moment_list(config=config, product=product) - uniqrounds = sorted(list(set(rounds))) + rounds = [] + for this_mom in list_of_moments: + rounds.append(self._kh.get_params_for_moment(this_mom)['round']) + uniqrounds = sorted(list(set(rounds))) + if len(uniqrounds) == 1: logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") logger.info("") - logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) - logger.info("... First, calculate all moments in stage {0}".format(np.min(uniqrounds))) + logger.info("... Secondary moments requested but not specified in moment keys") + logger.info("... Returning to main loop") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + return () + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Generating moment maps for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + if res_tag is not None: + logger.info("Resolution " + str(res_tag)) + logger.info("Found a strict mask? " + str(found_strictmask)) + logger.info("Found a broad mask? " + str(found_broadmask)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("... input file: " + input_file) + logger.info("... noise file: " + noise_file) + logger.info("... strict mask file: " + strictmask_file) + logger.info("... 
broad mask file: " + broadmask_file) + logger.info("... list of moments: " + str(list_of_moments)) + logger.info("... output root: " + outroot) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Execute + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) + + for thisround in uniqrounds[1:2]: + logger.info("... Now, calculate all moments in stage {0}".format(thisround)) logger.info("") sublist_of_moments = [this_mom for this_mom in list_of_moments - if self._kh.get_params_for_moment(this_mom)['round'] == uniqrounds[0]] + if self._kh.get_params_for_moment(this_mom)['round'] == thisround] if (not self._dry_run): for this_mom in sublist_of_moments: - logger.info('... generating moment: '+str(this_mom)) + logger.info('... generating moment: ' + str(this_mom)) + + proceed = True mom_params = self._kh.get_params_for_moment(this_mom) @@ -1186,14 +1312,14 @@ def task_generate_moments( if not found_strictmask: logger.warning("Strict mask needed but not found. Skipping.") continue - mask_file = indir+strictmask_file + mask_file = indir + strictmask_file elif mom_params['mask'] == 'broadmask': if not found_broadmask: logger.warning("Broad mask needed but not found. 
Skipping.") continue - mask_file = indir+broadmask_file + mask_file = indir + broadmask_file else: - logger.warning("Mask choice not recognized for moment: "+str(this_mom)) + logger.warning("Mask choice not recognized for moment: " + str(this_mom)) logger.warning("Skipping.") continue @@ -1201,901 +1327,708 @@ def task_generate_moments( # Check noise # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - if not (os.path.isfile(indir+noise_file)): - logger.warning("Missing noise: "+indir+noise_file) + if not (os.path.isfile(indir + noise_file)): + logger.warning("Missing noise: " + indir + noise_file) noise_in = None errorfile = None else: - noise_in = indir+noise_file - errorfile = outdir+outroot+mom_params['ext_error']+'.fits' + noise_in = indir + noise_file + errorfile = outdir + outroot + mom_params['ext_error'] + '.fits' # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # Set up output file # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - outfile = outdir+outroot+mom_params['ext']+'.fits' + outfile = outdir + outroot + mom_params['ext'] + '.fits' # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # In the first round, just call the moment generator + # Look up maps to pass and build the kwarg list # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - moment_generator( - indir+input_file, mask=mask_file, noise=noise_in, - moment=mom_params['algorithm'], momkwargs=mom_params['kwargs'], - outfile=outfile, errorfile=errorfile, - channel_correlation=None) - # endregion - - # region secondary moments - - def task_generate_secondary_moments( - self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Generate moment maps. - """ - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Look up filenames, list of moments, etc. 
- # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - # Generate file names - - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' - - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' - - # Filenames - - fname_dict_nores = self._fname_dict( - target=target, config=config, product=product, res_tag=None, - extra_ext_in=extra_ext) - - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) - - # ... broad mask never has a resolution tag - - broadmask_file = fname_dict_nores['broadmask'] - - # ... files with resolution tag - - input_file = fname_dict['cube'] - noise_file = fname_dict['noise'] - strictmask_file = fname_dict['strictmask'] - - outroot = fname_dict['momentroot'] - - # Check input file and mask existence - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + kwargs_dict = mom_params['kwargs'] - found_broadmask = (os.path.isfile(indir+broadmask_file)) - found_strictmask = (os.path.isfile(indir+strictmask_file)) + maps_to_pass = mom_params['maps_to_pass'] + for map_ext in maps_to_pass: - # Look up which moments to calculate + # File name for this extension + this_map_file = outroot + '_' + map_ext + '.fits' - list_of_moments = self._kh.get_moment_list(config=config, product=product) + # Verify file existence + if not (os.path.isfile(indir + this_map_file)): + logger.warning("Missing needed context file: " + indir + this_map_file) + proceed = False - rounds = [] - for this_mom in list_of_moments: - rounds.append(self._kh.get_params_for_moment(this_mom)['round']) - uniqrounds = sorted(list(set(rounds))) + # Add as param to kwarg dict + kwargs_dict[map_ext] = indir + this_map_file - if len(uniqrounds) == 1: - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - logger.info("... 
Total stages of moment calculation: {0}".format(len(uniqrounds))) - logger.info("... Secondary moments requested but not specified in moment keys") - logger.info("... Returning to main loop") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - return() + other_exts = mom_params['other_exts'] + for param_name in other_exts.keys(): - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # File name for this extension + this_ext = other_exts[param_name] - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Generating moment maps for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - if res_tag is not None: - logger.info("Resolution "+str(res_tag)) - logger.info("Found a strict mask? "+str(found_strictmask)) - logger.info("Found a broad mask? "+str(found_broadmask)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - - logger.info("... input file: "+input_file) - logger.info("... noise file: "+noise_file) - logger.info("... strict mask file: "+strictmask_file) - logger.info("... broad mask file: "+broadmask_file) - logger.info("... list of moments: "+str(list_of_moments)) - logger.info("... output root: "+outroot) - - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Execute - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - - logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) - - for thisround in uniqrounds[1:2]: - logger.info("... Now, calculate all moments in stage {0}".format(thisround)) - logger.info("") - - sublist_of_moments = [this_mom for this_mom in list_of_moments - if self._kh.get_params_for_moment(this_mom)['round'] == thisround] - - if (not self._dry_run): - for this_mom in sublist_of_moments: - logger.info('... 
generating moment: '+str(this_mom)) - - proceed = True - - mom_params = self._kh.get_params_for_moment(this_mom) - - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Look up mask - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if mom_params['mask'] is None: - mask_file = None - elif mom_params['mask'].strip().lower() == 'none': - mask_file = None - elif mom_params['mask'] == 'strictmask': - if not found_strictmask: - logger.warning("Strict mask needed but not found. Skipping.") - continue - mask_file = indir+strictmask_file - elif mom_params['mask'] == 'broadmask': - if not found_broadmask: - logger.warning("Broad mask needed but not found. Skipping.") - continue - mask_file = indir+broadmask_file - else: - logger.warning("Mask choice not recognized for moment: "+str(this_mom)) - logger.warning("Skipping.") - continue + # Code to look up file name + this_ext_file = str(target) + this_ext - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Check noise - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Verify file existence + if not (os.path.isfile(indir + this_ext_file)): + logger.warning("Missing needed context file: " + indir + this_ext_file) + proceed = False - if not (os.path.isfile(indir+noise_file)): - logger.warning("Missing noise: "+indir+noise_file) - noise_in = None - errorfile = None - else: - noise_in = indir+noise_file - errorfile = outdir+outroot+mom_params['ext_error']+'.fits' + # Add as param to kwarg dict + kwargs_dict[param_name] = indir + this_ext_file - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Set up output file - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + if proceed == False: + logger.warning("Missing some needed information. 
Skipping this calculation.") + continue - outfile = outdir+outroot+mom_params['ext']+'.fits' + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Call the moment generator + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Look up maps to pass and build the kwarg list - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # print(kwargs_dict) - kwargs_dict = mom_params['kwargs'] + moment_generator( + indir + input_file, mask=mask_file, noise=noise_in, + outfile=outfile, errorfile=errorfile, + channel_correlation=None, + moment=mom_params['algorithm'], + momkwargs=kwargs_dict, + # Deprecated context + ) - maps_to_pass = mom_params['maps_to_pass'] - for map_ext in maps_to_pass: + # endregion - # File name for this extension - this_map_file = outroot+'_'+map_ext+'.fits' + # region vfield + def task_build_vfield( + self, + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Generate a combined velocity field from a list of mom1 maps. 
+ """ - # Verify file existence - if not (os.path.isfile(indir+this_map_file)): - logger.warning("Missing needed context file: "+indir+this_map_file) - proceed = False + # Generate file names - # Add as param to kwarg dict - kwargs_dict[map_ext] = indir+this_map_file + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - other_exts = mom_params['other_exts'] - for param_name in other_exts.keys(): + outdir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # File name for this extension - this_ext = other_exts[param_name] + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - # Code to look up file name - this_ext_file = str(target)+this_ext + input_file = fname_dict['momentroot'] + '_mom1wprior.fits' + outfile = fname_dict['vfield'] - # Verify file existence - if not (os.path.isfile(indir+this_ext_file)): - logger.warning("Missing needed context file: "+indir+this_ext_file) - proceed = False + # Check input file existence - # Add as param to kwarg dict - kwargs_dict[param_name] = indir+this_ext_file + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - if proceed == False: - logger.warning("Missing some needed information. 
Skipping this calculation.") - continue + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Create the list of maps to combine + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Call the moment generator - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - # print(kwargs_dict) - - moment_generator( - indir+input_file, mask=mask_file, noise=noise_in, - outfile=outfile, errorfile=errorfile, - channel_correlation=None, - moment=mom_params['algorithm'], - momkwargs=kwargs_dict, - # Deprecated context - ) - # endregion - - # region vfield - def task_build_vfield( - self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Generate a combined velocity field from a list of mom1 maps. - """ + list_of_vfiels = [] - # Generate file names + linked_configs = self._kh.get_linked_mask_configs( + config=config, product=product) - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + if config not in linked_configs: + linked_configs.append(config) - outdir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + for cross_config in linked_configs: fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, + target=target, config=cross_config, product=product, res_tag=None, extra_ext_in=extra_ext) - input_file = fname_dict['momentroot']+'_mom1wprior.fits' - outfile = fname_dict['vfield'] - - # Check input file existence - - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() - - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Create the list of maps to combine - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - list_of_vfiels = [] - - linked_configs = self._kh.get_linked_mask_configs( - config=config, 
product=product) + this_vfield = fname_dict['momentroot'] + '_mom1wprior.fits' + if this_vfield not in list_of_vfiels: + if os.path.isfile(indir + this_vfield): + list_of_vfiels.append(indir + this_vfield) - if config not in linked_configs: - linked_configs.append(config) + # Loop over all angular and physical resolutions. - for cross_config in linked_configs: + for this_res in self._kh.get_ang_res_dict( + config=cross_config, product=product): fname_dict = self._fname_dict( - target=target, config=cross_config, product=product, res_tag=None, + target=target, config=cross_config, product=product, res_tag=this_res, extra_ext_in=extra_ext) - this_vfield = fname_dict['momentroot']+'_mom1wprior.fits' + this_vfield = fname_dict['momentroot'] + '_mom1wprior.fits' if this_vfield not in list_of_vfiels: - if os.path.isfile(indir+this_vfield): - list_of_vfiels.append(indir+this_vfield) + if os.path.isfile(indir + this_vfield): + list_of_vfiels.append(indir + this_vfield) - # Loop over all angular and physical resolutions. 
- - for this_res in self._kh.get_ang_res_dict( - config=cross_config,product=product): + for this_res in self._kh.get_phys_res_dict( + config=cross_config, product=product): - fname_dict = self._fname_dict( - target=target, config=cross_config, product=product, res_tag=this_res, - extra_ext_in=extra_ext) - - this_vfield = fname_dict['momentroot']+'_mom1wprior.fits' - if this_vfield not in list_of_vfiels: - if os.path.isfile(indir+this_vfield): - list_of_vfiels.append(indir+this_vfield) - - for this_res in self._kh.get_phys_res_dict( - config=cross_config,product=product): + fname_dict = self._fname_dict( + target=target, config=cross_config, product=product, res_tag=this_res, + extra_ext_in=extra_ext) - fname_dict = self._fname_dict( - target=target, config=cross_config, product=product, res_tag=this_res, - extra_ext_in=extra_ext) + this_vfield = fname_dict['momentroot'] + '_mom1wprior.fits' + if this_vfield not in list_of_vfiels: + if os.path.isfile(indir + this_vfield): + list_of_vfiels.append(indir + this_vfield) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Creating a velocity field for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("Input file: " + input_file) + logger.info("List of other velocity fields:") + for this_vfield in list_of_vfiels: + logger.info(str(this_vfield)) + logger.info("Target file: " + outfile) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Call the vfield combining routine + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + if (not self._dry_run): + recipe_phangs_vfield( + indir + input_file, + list_of_vfields=list_of_vfiels, + outfile=outdir + outfile, + overwrite=overwrite) - this_vfield = 
fname_dict['momentroot']+'_mom1wprior.fits' - if this_vfield not in list_of_vfiels: - if os.path.isfile(indir+this_vfield): - list_of_vfiels.append(indir+this_vfield) + # endregion - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # region shuffle cube - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Creating a velocity field for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + def task_shuffle_cube( + self, + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Construct shuffled cube and save it to disk. + """ - logger.info("Input file: "+input_file) - logger.info("List of other velocity fields:") - for this_vfield in list_of_vfiels: - logger.info(str(this_vfield)) - logger.info("Target file: "+outfile) + # Generate file names - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Call the vfield combining routine - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - if (not self._dry_run): - recipe_phangs_vfield( - indir+input_file, - list_of_vfields=list_of_vfiels, - outfile=outdir+outfile, - overwrite=overwrite) - # endregion + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # region shuffle cube + vfield_dir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) + vfield_dir = os.path.abspath(vfield_dir) + '/' - def task_shuffle_cube( - self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Construct shuffled cube and save it to disk. 
- """ + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - # Generate file names + input_file = fname_dict['cube'] + vfield_file = fname_dict['vfield'] + outfile = fname_dict['shuffled'] - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + # Check input file existence - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - vfield_dir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) - vfield_dir = os.path.abspath(vfield_dir)+'/' + if not (os.path.isfile(vfield_dir + vfield_file)): + logger.warning("Missing velocity field: " + vfield_dir + vfield_file) + return () - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + # Access keywords for noise generation - input_file = fname_dict['cube'] - vfield_file = fname_dict['vfield'] - outfile = fname_dict['shuffled'] + shuffle_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='shuffle_kw') - # Check input file existence + # Report - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Running shuffling for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - if not (os.path.isfile(vfield_dir+vfield_file)): - logger.warning("Missing velocity field: "+vfield_dir+vfield_file) - return() + logger.info("Input file: " + input_file) + logger.info("Target file: " + outfile) + logger.info("Keyword arguments: " + str(shuffle_kwargs)) - # Access keywords for noise generation 
+ # Call shuffling routine - shuffle_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='shuffle_kw') + recipe_shuffle_cube( + cube_in=indir + input_file, + vfield_in=vfield_dir + vfield_file, + outfile=outdir + outfile, + overwrite=overwrite) - # Report + # endregion - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Running shuffling for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + # region flat strict mask - logger.info("Input file: "+input_file) - logger.info("Target file: "+outfile) - logger.info("Keyword arguments: "+str(shuffle_kwargs)) - - # Call shuffling routine + def task_build_flat_strict_mask( + self, + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Construct the flat strict mask and save it to disk. + """ - recipe_shuffle_cube( - cube_in=indir+input_file, - vfield_in=vfield_dir+vfield_file, - outfile=outdir+outfile, - overwrite=overwrite) - # endregion + # Generate file names - # region flat strict mask + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - def task_build_flat_strict_mask( - self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Construct the flat strict mask and save it to disk. 
- """ + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # Generate file names + vfield_dir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) + vfield_dir = os.path.abspath(vfield_dir) + '/' - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + input_file = fname_dict['cube'] + vfield_file = fname_dict['vfield'] + mask_file = fname_dict['strictmask'] + coverage_file = fname_dict['coverage'] + coverage2d_file = fname_dict['coverage2d'] - vfield_dir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) - vfield_dir = os.path.abspath(vfield_dir)+'/' + outfile = fname_dict['flatstrictmask'] - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + # Check input file existence - input_file = fname_dict['cube'] - vfield_file = fname_dict['vfield'] - mask_file = fname_dict['strictmask'] - coverage_file = fname_dict['coverage'] - coverage2d_file = fname_dict['coverage2d'] + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - outfile = fname_dict['flatstrictmask'] + if not (os.path.isfile(vfield_dir + vfield_file)): + logger.warning("Missing velocity field: " + vfield_dir + vfield_file) + return () - # Check input file existence + if not (os.path.isfile(indir + mask_file)): + logger.warning("Missing strict mask: " + indir + mask_file) + return () - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + # Coverage - if not (os.path.isfile(vfield_dir+vfield_file)): - logger.warning("Missing 
velocity field: "+vfield_dir+vfield_file) - return() + if not (os.path.isfile(indir + coverage_file)): + logger.warning("Missing coverage estimate: " + indir + coverage_file) + logger.warning("This may be fine. .") + coverage_file = None - if not (os.path.isfile(indir+mask_file)): - logger.warning("Missing strict mask: "+indir+mask_file) - return() + # Access keywords for mask generation - # Coverage + flatmask_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='flatstrictmask_kw') - if not (os.path.isfile(indir+coverage_file)): - logger.warning("Missing coverage estimate: "+indir+coverage_file) - logger.warning("This may be fine. .") - coverage_file = None + # get velocity window from separate key if not provided from derived key + if not flatmask_kwargs: + this_window = self._kh.get_window_for_target(target) + if this_window is None: + logger.error("No velocity window for target " + target) + return () + flatmask_kwargs = {'v_window': this_window} - # Access keywords for mask generation + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - flatmask_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='flatstrictmask_kw') + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Creating a flat strict mask for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - # get velocity window from separate key if not provided from derived key - if not flatmask_kwargs: - this_window = self._kh.get_window_for_target(target) - if this_window is None: - logger.error("No velocity window for target "+target) - return() - flatmask_kwargs = {'v_window':this_window} + logger.info("Input file: " + input_file) + logger.info("Velocity field file: " + vfield_file) + logger.info("Strict mask file: " + mask_file) + if 
coverage_file is not None: + logger.info("Coverage file: " + coverage_file) + logger.info("Target file: " + outfile) + logger.info("Keyword arguments: " + str(flatmask_kwargs)) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Call the masking routines + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Creating a flat strict mask for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + if (not self._dry_run): - logger.info("Input file: "+input_file) - logger.info("Velocity field file: "+vfield_file) - logger.info("Strict mask file: "+mask_file) + # ... put the directory into the name to allow it to remain + # None when missing. if coverage_file is not None: - logger.info("Coverage file: "+coverage_file) - logger.info("Target file: "+outfile) - logger.info("Keyword arguments: "+str(flatmask_kwargs)) + coverage_file_in = indir + coverage_file + else: + coverage_file_in = None + + # run flat mask routine + recipe_phangs_flat_mask( + cube_in=indir + input_file, + vfield_in=vfield_dir + vfield_file, + mask_in=indir + mask_file, + coverage=coverage_file_in, + outfile=outdir + outfile, + mask_kwargs=flatmask_kwargs, + return_spectral_cube=False, + overwrite=overwrite) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Call the masking routines - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # region flat broad mask - if (not self._dry_run): + def task_build_flat_broad_mask( + self, + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Construct the flat broad mask and save it to disk. + """ - # ... 
put the directory into the name to allow it to remain - # None when missing. - if coverage_file is not None: - coverage_file_in = indir+coverage_file - else: - coverage_file_in = None - - # run flat mask routine - recipe_phangs_flat_mask( - cube_in=indir+input_file, - vfield_in=vfield_dir+vfield_file, - mask_in=indir+mask_file, - coverage=coverage_file_in, - outfile=outdir+outfile, - mask_kwargs=flatmask_kwargs, - return_spectral_cube=False, - overwrite=overwrite) + # Generate file names - # region flat broad mask + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - def task_build_flat_broad_mask( - self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Construct the flat broad mask and save it to disk. - """ + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # Generate file names + vfield_dir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) + vfield_dir = os.path.abspath(vfield_dir) + '/' - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' + input_file = fname_dict['cube'] + vfield_file = fname_dict['vfield'] + mask_file = fname_dict['broadmask'] + coverage_file = fname_dict['coverage'] + coverage2d_file = fname_dict['coverage2d'] - vfield_dir = self._kh.get_vfield_dir_for_target(target=target, changeto=False) - vfield_dir = os.path.abspath(vfield_dir)+'/' + outfile = fname_dict['flatbroadmask'] - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + # Check input file 
existence - input_file = fname_dict['cube'] - vfield_file = fname_dict['vfield'] - mask_file = fname_dict['broadmask'] - coverage_file = fname_dict['coverage'] - coverage2d_file = fname_dict['coverage2d'] + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - outfile = fname_dict['flatbroadmask'] + if not (os.path.isfile(vfield_dir + vfield_file)): + logger.warning("Missing velocity field: " + vfield_dir + vfield_file) + return () - # Check input file existence + if not (os.path.isfile(indir + mask_file)): + logger.warning("Missing broad mask: " + indir + mask_file) + return () - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + # Coverage - if not (os.path.isfile(vfield_dir+vfield_file)): - logger.warning("Missing velocity field: "+vfield_dir+vfield_file) - return() + if not (os.path.isfile(indir + coverage_file)): + logger.warning("Missing coverage estimate: " + indir + coverage_file) + logger.warning("This may be fine. Proceeding.") + coverage_file = None - if not (os.path.isfile(indir+mask_file)): - logger.warning("Missing broad mask: "+indir+mask_file) - return() + # Access keywords for mask generation - # Coverage + flatmask_kwargs = self._kh.get_derived_kwargs( + config=config, product=product, kwarg_type='flatbroadmask_kw') - if not (os.path.isfile(indir+coverage_file)): - logger.warning("Missing coverage estimate: "+indir+coverage_file) - logger.warning("This may be fine. 
Proceeding.") - coverage_file = None + # get velocity window from separate key if not provided from derived key + if not flatmask_kwargs: + this_window = self._kh.get_window_for_target(target) + if this_window is None: + logger.error("No velocity window for target " + target) + return () + flatmask_kwargs = {'window': this_window} - # Access keywords for mask generation - - flatmask_kwargs = self._kh.get_derived_kwargs( - config=config, product=product, kwarg_type='flatbroadmask_kw') - - # get velocity window from separate key if not provided from derived key - if not flatmask_kwargs: - this_window = self._kh.get_window_for_target(target) - if this_window is None: - logger.error("No velocity window for target "+target) - return() - flatmask_kwargs = {'window':this_window} + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Creating a flat broad mask for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Creating a flat broad mask for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + logger.info("Input file: " + input_file) + logger.info("Velocity field file: " + vfield_file) + logger.info("Broad mask file: " + mask_file) + if coverage_file is not None: + logger.info("Coverage file: " + coverage_file) + logger.info("Target file: " + outfile) + logger.info("Keyword arguments: " + str(flatmask_kwargs)) - logger.info("Input file: "+input_file) - logger.info("Velocity field file: "+vfield_file) - 
logger.info("Broad mask file: "+mask_file) - if coverage_file is not None: - logger.info("Coverage file: "+coverage_file) - logger.info("Target file: "+outfile) - logger.info("Keyword arguments: "+str(flatmask_kwargs)) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Call the masking routines + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Call the masking routines - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + if (not self._dry_run): - if (not self._dry_run): + # ... put the directory into the name to allow it to remain + # None when missing. + if coverage_file is not None: + coverage_file_in = indir + coverage_file + else: + coverage_file_in = None + + # run flat mask routine + recipe_phangs_flat_mask( + cube_in=indir + input_file, + vfield_in=vfield_dir + vfield_file, + mask_in=indir + mask_file, + coverage=coverage_file_in, + outfile=outdir + outfile, + mask_kwargs=flatmask_kwargs, + return_spectral_cube=False, + overwrite=overwrite) - # ... put the directory into the name to allow it to remain - # None when missing. - if coverage_file is not None: - coverage_file_in = indir+coverage_file - else: - coverage_file_in = None - - # run flat mask routine - recipe_phangs_flat_mask( - cube_in=indir+input_file, - vfield_in=vfield_dir+vfield_file, - mask_in=indir+mask_file, - coverage=coverage_file_in, - outfile=outdir+outfile, - mask_kwargs=flatmask_kwargs, - return_spectral_cube=False, - overwrite=overwrite) - # end regions + # end regions - # region flat maps + # region flat maps - def task_generate_flatmaps( + def task_generate_flatmaps( self, - target = None, - config = None, - product = None, - res_tag = None, - extra_ext = '', - overwrite = False, - ): - """ - Generate moment maps. - """ - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Look up filenames, list of moments, etc. 
- # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - # Generate file names - - indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - indir = os.path.abspath(indir)+'/' - - outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) - outdir = os.path.abspath(outdir)+'/' - - # Filenames - - fname_dict_nores = self._fname_dict( - target=target, config=config, product=product, res_tag=None, - extra_ext_in=extra_ext) + target=None, + config=None, + product=None, + res_tag=None, + extra_ext='', + overwrite=False, + ): + """ + Generate moment maps. + """ + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Look up filenames, list of moments, etc. + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - fname_dict = self._fname_dict( - target=target, config=config, product=product, res_tag=res_tag, - extra_ext_in=extra_ext) + # Generate file names - # ... broad mask never has a resolution tag + indir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + indir = os.path.abspath(indir) + '/' - broadmask_file = fname_dict_nores['flatbroadmask'] + outdir = self._kh.get_derived_dir_for_target(target=target, changeto=False) + outdir = os.path.abspath(outdir) + '/' - # ... files with resolution tag + # Filenames - input_file = fname_dict['cube'] - noise_file = fname_dict['noise'] - strictmask_file = fname_dict['flatstrictmask'] + fname_dict_nores = self._fname_dict( + target=target, config=config, product=product, res_tag=None, + extra_ext_in=extra_ext) - outroot = fname_dict['momentroot'] + fname_dict = self._fname_dict( + target=target, config=config, product=product, res_tag=res_tag, + extra_ext_in=extra_ext) - # Check input file and mask existence + # ... 
broad mask never has a resolution tag - if not (os.path.isfile(indir+input_file)): - logger.warning("Missing cube: "+indir+input_file) - return() + broadmask_file = fname_dict_nores['flatbroadmask'] - found_broadmask = (os.path.isfile(indir+broadmask_file)) - found_strictmask = (os.path.isfile(indir+strictmask_file)) + # ... files with resolution tag - # Look up which moments to calculate + input_file = fname_dict['cube'] + noise_file = fname_dict['noise'] + strictmask_file = fname_dict['flatstrictmask'] - list_of_moments = self._kh.get_moment_list(config=config, product=product) + outroot = fname_dict['momentroot'] - rounds = [] - for this_mom in list_of_moments: - rounds.append(self._kh.get_params_for_moment(this_mom)['round']) - uniqrounds = sorted(list(set(rounds))) + # Check input file and mask existence - if len(uniqrounds) == 1: - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) - logger.info("... Secondary moments requested but not specified in moment keys") - logger.info("... Returning to main loop") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - return() + if not (os.path.isfile(indir + input_file)): + logger.warning("Missing cube: " + indir + input_file) + return () - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Report - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + found_broadmask = (os.path.isfile(indir + broadmask_file)) + found_strictmask = (os.path.isfile(indir + strictmask_file)) - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Generating moment maps for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - if res_tag is not None: - logger.info("Resolution "+str(res_tag)) - logger.info("Found a flat strict mask? "+str(found_strictmask)) - logger.info("Found a flat broad mask? 
"+str(found_broadmask)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") + # Look up which moments to calculate - logger.info("... input file: "+input_file) - logger.info("... noise file: "+noise_file) - logger.info("... flat strict mask file: "+strictmask_file) - logger.info("... flat broad mask file: "+broadmask_file) - logger.info("... list of moments: "+str(list_of_moments)) - logger.info("... output root: "+outroot) + list_of_moments = self._kh.get_moment_list(config=config, product=product) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Execute - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + rounds = [] + for this_mom in list_of_moments: + rounds.append(self._kh.get_params_for_moment(this_mom)['round']) + uniqrounds = sorted(list(set(rounds))) + if len(uniqrounds) == 1: logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") logger.info("") - logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) + logger.info("... Secondary moments requested but not specified in moment keys") + logger.info("... Returning to main loop") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + return () + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Report + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Generating moment maps for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + if res_tag is not None: + logger.info("Resolution " + str(res_tag)) + logger.info("Found a flat strict mask? " + str(found_strictmask)) + logger.info("Found a flat broad mask? " + str(found_broadmask)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("... input file: " + input_file) + logger.info("... noise file: " + noise_file) + logger.info("... flat strict mask file: " + strictmask_file) + logger.info("... 
flat broad mask file: " + broadmask_file) + logger.info("... list of moments: " + str(list_of_moments)) + logger.info("... output root: " + outroot) + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Execute + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") + + logger.info("... Total stages of moment calculation: {0}".format(len(uniqrounds))) + + for thisround in uniqrounds[2:3]: + logger.info("... Now, calculate all moments in stage {0}".format(thisround)) + logger.info("") - for thisround in uniqrounds[2:3]: - logger.info("... Now, calculate all moments in stage {0}".format(thisround)) - logger.info("") - - sublist_of_moments = [this_mom for this_mom in list_of_moments - if self._kh.get_params_for_moment(this_mom)['round'] == thisround] - - if (not self._dry_run): - for this_mom in sublist_of_moments: - logger.info('... generating moment: '+str(this_mom)) - - proceed = True - - mom_params = self._kh.get_params_for_moment(this_mom) - - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Look up mask - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if mom_params['mask'] is None: - mask_file = None - elif mom_params['mask'].strip().lower() == 'none': - mask_file = None - elif mom_params['mask'] == 'flatstrictmask': - if not found_strictmask: - logger.warning("Flat strict mask needed but not found. Skipping.") - continue - mask_file = indir+strictmask_file - elif mom_params['mask'] == 'flatbroadmask': - if not found_broadmask: - logger.warning("Flat broad mask needed but not found. 
Skipping.") - continue - mask_file = indir+broadmask_file - else: - logger.warning("Mask choice not recognized for moment: "+str(this_mom)) - logger.warning("Skipping.") - continue + sublist_of_moments = [this_mom for this_mom in list_of_moments + if self._kh.get_params_for_moment(this_mom)['round'] == thisround] - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Check noise - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + if (not self._dry_run): + for this_mom in sublist_of_moments: + logger.info('... generating moment: ' + str(this_mom)) - if not (os.path.isfile(indir+noise_file)): - logger.warning("Missing noise: "+indir+noise_file) - noise_in = None - errorfile = None - else: - noise_in = indir+noise_file - errorfile = outdir+outroot+mom_params['ext_error']+'.fits' + proceed = True - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Set up output file - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + mom_params = self._kh.get_params_for_moment(this_mom) - outfile = outdir+outroot+mom_params['ext']+'.fits' + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Look up mask + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Look up maps to pass and build the kwarg list - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + if mom_params['mask'] is None: + mask_file = None + elif mom_params['mask'].strip().lower() == 'none': + mask_file = None + elif mom_params['mask'] == 'flatstrictmask': + if not found_strictmask: + logger.warning("Flat strict mask needed but not found. Skipping.") + continue + mask_file = indir + strictmask_file + elif mom_params['mask'] == 'flatbroadmask': + if not found_broadmask: + logger.warning("Flat broad mask needed but not found. 
Skipping.") + continue + mask_file = indir + broadmask_file + else: + logger.warning("Mask choice not recognized for moment: " + str(this_mom)) + logger.warning("Skipping.") + continue - kwargs_dict = mom_params['kwargs'] + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Check noise + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - maps_to_pass = mom_params['maps_to_pass'] - for map_ext in maps_to_pass: + if not (os.path.isfile(indir + noise_file)): + logger.warning("Missing noise: " + indir + noise_file) + noise_in = None + errorfile = None + else: + noise_in = indir + noise_file + errorfile = outdir + outroot + mom_params['ext_error'] + '.fits' - # File name for this extension - this_map_file = outroot+'_'+map_ext+'.fits' + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Set up output file + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Verify file existence - if not (os.path.isfile(indir+this_map_file)): - logger.warning("Missing needed context file: "+indir+this_map_file) - proceed = False + outfile = outdir + outroot + mom_params['ext'] + '.fits' - # Add as param to kwarg dict - kwargs_dict[map_ext] = indir+this_map_file + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Look up maps to pass and build the kwarg list + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - other_exts = mom_params['other_exts'] - for param_name in other_exts.keys(): + kwargs_dict = mom_params['kwargs'] - # File name for this extension - this_ext = other_exts[param_name] + maps_to_pass = mom_params['maps_to_pass'] + for map_ext in maps_to_pass: - # Code to look up file name - this_ext_file = str(target)+this_ext + # File name for this extension + this_map_file = outroot + '_' + map_ext + '.fits' - # Verify file existence - if not (os.path.isfile(indir+this_ext_file)): - logger.warning("Missing needed context file: "+indir+this_ext_file) - proceed = False + # Verify file existence + if not (os.path.isfile(indir + this_map_file)): + 
logger.warning("Missing needed context file: " + indir + this_map_file) + proceed = False - # Add as param to kwarg dict - kwargs_dict[param_name] = indir+this_ext_file + # Add as param to kwarg dict + kwargs_dict[map_ext] = indir + this_map_file - if proceed == False: - logger.warning("Missing some needed information. Skipping this calculation.") - continue + other_exts = mom_params['other_exts'] + for param_name in other_exts.keys(): - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # Call the moment generator - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # File name for this extension + this_ext = other_exts[param_name] - moment_generator( - indir+input_file, mask=mask_file, noise=noise_in, - moment=mom_params['algorithm'], - momkwargs=mom_params['kwargs'], - outfile=outfile, errorfile=errorfile, - channel_correlation=None) - # endregion + # Code to look up file name + this_ext_file = str(target) + this_ext -else: - # Make a mock DerivedHandler when astropy (and therefore spectral-cube) - # is not available (i.e. in a casa environment) + # Verify file existence + if not (os.path.isfile(indir + this_ext_file)): + logger.warning("Missing needed context file: " + indir + this_ext_file) + proceed = False - class DerivedHandler(handlerTemplate.HandlerTemplate): - """ - Class to create signal masks based on image cubes, and then apply - the masks to make moment maps. This is done for each galaxy at - multiple spatial/angular scales. - """ + # Add as param to kwarg dict + kwargs_dict[param_name] = indir + this_ext_file + if proceed == False: + logger.warning("Missing some needed information. 
Skipping this calculation.") + continue - ############ - # __init__ # - ############ + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + # Call the moment generator + # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - def __init__( - self, - key_handler = None, - dry_run = False, - ): - raise ImportError("DerivedHandler can only be used when astropy and spectral-cube " - " is installed.") + moment_generator( + indir + input_file, mask=mask_file, noise=noise_in, + moment=mom_params['algorithm'], + momkwargs=mom_params['kwargs'], + outfile=outfile, errorfile=errorfile, + channel_correlation=None) + # endregion diff --git a/phangsPipeline/handlerImaging.py b/phangsPipeline/handlerImaging.py index c64ff274..e99e28d2 100644 --- a/phangsPipeline/handlerImaging.py +++ b/phangsPipeline/handlerImaging.py @@ -38,38 +38,21 @@ """ -## 20200210 dzliu: self._kh._cleanmask_dict is always None. It is not yet implemented in "handlerKeys.py"! -## 20200214 dzliu: will users want to do imaging for individual project instead of concatenated ms? -## 20200214 dzliu: needing KeyHandler API: -## 20200214 dzliu: self._kh._cleanmask_dict --> self._kh.get_cleanmask() # input target name, output clean mask file -## 20200214 dzliu: self._kh._target_dict --> self._kh.get_target_dict() # for rastring, decstring -## 20200214 dzliu: self._kh._override_dict --> self._kh.get_overrides() -## 20200214 dzliu: self._kh._dir_keys --> self._kh.get_target_name_for_multipart_name() -## 20200214 dzliu: self._kh._config_dict['interf_config']['clean_scales_arcsec'] # angular scales -## 20200218 dzliu: CASA 5.4.0 works, but CASA 5.6.0 does not work!! -- now should work. -## 20200218 dzliu: revert does not work! -- copy_imaging suffix bug fixed. 
-## 20200218 dzliu: need to test 'cont' - -import os, sys, re, shutil -import glob import logging +import os + +# Check casa environment by importing CASA-only packages +from .check_imports import is_casa_installed + +casa_enabled = is_casa_installed() import numpy as np logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed -casa_enabled = is_casa_installed() - - if casa_enabled: logger.debug('casa_enabled = True') - from . import casaImagingRoutines as imr - from . import casaMaskingRoutines as msr - # reload(imr) - # reload(msr) else: logger.debug('casa_enabled = False') @@ -80,11 +63,12 @@ from .clean_call import CleanCall, CleanCallFunctionDecorator - from . import utilsLines as lines + from . import casaImagingRoutines as imr + from . import casaMaskingRoutines as msr + from . import casaStuff from . import handlerTemplate + from . import utilsLines as lines from . import utilsFilenames - from . 
import casaStuff - class ImagingHandler(handlerTemplate.HandlerTemplate): """ @@ -768,7 +752,7 @@ def task_make_dirty_image( logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") logger.info("") - if not self._dry_run and casa_enabled: + if not self._dry_run: imr.make_dirty_image(clean_call, imaging_method=imaging_method) if backup: imr.copy_imaging( @@ -799,7 +783,7 @@ def task_revert_to_imaging( logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") logger.info("") - if (not self._dry_run) and casa_enabled: + if not self._dry_run: imr.copy_imaging( input_root=clean_call.get_param('imagename') + '_' + tag, output_root=clean_call.get_param('imagename'), @@ -854,8 +838,6 @@ def task_read_clean_mask( if self._dry_run: return () - if not casa_enabled: - return () # Get fname dict fname_dict = self._fname_dict(product=product, imagename=clean_call.get_param('imagename'), @@ -906,8 +888,6 @@ def task_multiscale_clean( if self._dry_run: return () - if not casa_enabled: - return () imr.clean_loop(clean_call=clean_call, imaging_method=imaging_method, @@ -992,8 +972,6 @@ def task_singlescale_mask( if self._dry_run: return() - if not casa_enabled: - return() # check if line product is_line_product = product in self._kh.get_line_products() @@ -1079,8 +1057,6 @@ def task_singlescale_clean( if self._dry_run: return () - if not casa_enabled: - return () imr.clean_loop(clean_call=clean_call, imaging_method=imaging_method, @@ -1141,7 +1117,7 @@ def task_export_to_fits( logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") logger.info("") - if not self._dry_run and casa_enabled: + if not self._dry_run: imr.export_imaging_to_fits(image_root, imaging_method=imaging_method) return () diff --git a/phangsPipeline/handlerImagingChunked.py b/phangsPipeline/handlerImagingChunked.py index 81db663b..b3b13085 100644 --- a/phangsPipeline/handlerImagingChunked.py +++ b/phangsPipeline/handlerImagingChunked.py @@ -7,42 +7,38 @@ """ -import os, sys, re, shutil import datetime -from copy import deepcopy, copy import glob import 
logging +import os import warnings +from copy import deepcopy, copy import numpy as np -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - # Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed +from .check_imports import is_casa_installed casa_enabled = is_casa_installed() +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) if casa_enabled: logger.debug('casa_enabled = True') - from . import casaImagingRoutines as imr - from . import casaMaskingRoutines as msr else: logger.debug('casa_enabled = False') if casa_enabled: - # Analysis utilities import analysisUtils as au - from .clean_call import CleanCall, CleanCallFunctionDecorator - - from . import utilsLines as lines + from . import casaImagingRoutines as imr + from . import casaMaskingRoutines as msr + from . import casaStuff from . import handlerTemplate from . import utilsFilenames - from . import casaStuff - + from . import utilsLines as lines + from .clean_call import CleanCall, CleanCallFunctionDecorator class ImagingChunkedHandler(handlerTemplate.HandlerTemplate): """ @@ -1018,7 +1014,7 @@ def task_make_dirty_image( product = self.product config = self.config overwrite = False - if not self._dry_run and casa_enabled: + if not self._dry_run: os.chdir(self._this_imaging_dir) @@ -1085,7 +1081,7 @@ def task_revert_to_imaging( chunks_iter = self.return_valid_chunks(chunk_num=chunk_num) - if (not self._dry_run) and casa_enabled: + if not self._dry_run: for ii, this_chunk_num in enumerate(chunks_iter): @@ -1219,8 +1215,6 @@ def task_multiscale_clean( if self._dry_run: return () - if not casa_enabled: - return () chunks_iter = self.return_valid_chunks(chunk_num=chunk_num) target = self.target @@ -1341,8 +1335,6 @@ def task_singlescale_mask( if self._dry_run: return() - if not casa_enabled: - return() # NOTE: we've removed the non-line defaults here as this approach should only be used for # line imaging. 
@@ -1439,8 +1431,6 @@ def task_singlescale_clean( if self._dry_run: return () - if not casa_enabled: - return () chunks_iter = self.return_valid_chunks(chunk_num=chunk_num) @@ -1584,7 +1574,7 @@ def task_export_to_fits( logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") logger.info("") - if not self._dry_run and casa_enabled: + if not self._dry_run: imr.export_imaging_to_fits(image_root) return () diff --git a/phangsPipeline/handlerKeys.py b/phangsPipeline/handlerKeys.py index 8ce7ca2f..360b1ed6 100644 --- a/phangsPipeline/handlerKeys.py +++ b/phangsPipeline/handlerKeys.py @@ -4,19 +4,12 @@ structure, etc. This part is pure python. """ -import os, sys, re import ast -import glob import logging -from math import floor +import os -import numpy as np - -from . import utilsLines as ll -from . import utilsLists as list_utils from . import utilsKeyReaders as key_readers -from . import utilsFilenames as fnames -from . import utilsResolutions +from . import utilsLists as list_utils logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) diff --git a/phangsPipeline/handlerPostprocess.py b/phangsPipeline/handlerPostprocess.py index 1cb75651..eb4e5e72 100644 --- a/phangsPipeline/handlerPostprocess.py +++ b/phangsPipeline/handlerPostprocess.py @@ -12,1406 +12,1291 @@ calls to CASA from this class. """ -import os, sys, re, shutil -import glob import logging +import os -import numpy as np +# Check casa environment by importing CASA-only packages +from .check_imports import is_casa_installed +casa_enabled = is_casa_installed() logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) - -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed -casa_enabled = is_casa_installed() - if casa_enabled: logger.debug('casa_enabled = True') - from . import casaCubeRoutines as ccr - from . import casaMosaicRoutines as cmr - from . 
import casaFeatherRoutines as cfr - # reload(ccr) - # reload(cmr) - # reload(cfr) else: logger.debug('casa_enabled = False') -from . import handlerTemplate -from . import utilsFilenames -from . import utilsResolutions +if casa_enabled: -from .clean_call import CleanCall + from . import casaCubeRoutines as ccr + from . import casaFeatherRoutines as cfr + from . import casaMosaicRoutines as cmr + from . import handlerTemplate + from . import utilsFilenames + from . import utilsResolutions + from .clean_call import CleanCall + class PostProcessHandler(handlerTemplate.HandlerTemplate): + """ + Class to handle post-processing of ALMA data. Post-processing here + begins with the results of imaging and proceeds through reduced, + science-ready data cubes. + """ -class PostProcessHandler(handlerTemplate.HandlerTemplate): - """ - Class to handle post-processing of ALMA data. Post-processing here - begins with the results of imaging and proceeds through reduced, - science-ready data cubes. - """ + def __init__( + self, + key_handler=None, + dry_run=False, + raise_exception_mosaic_part_missing=False, + ): + + # inherit template class + handlerTemplate.HandlerTemplate.__init__(self, key_handler=key_handler, dry_run=dry_run) + self.raise_exception_mosaic_part_missing = raise_exception_mosaic_part_missing + + # region File name routines + + ########################################### + # Defined file names for various products # + ########################################### - def __init__( - self, - key_handler = None, - dry_run = False, - raise_exception_mosaic_part_missing = False, + def _fname_dict( + self, + target=None, + config=None, + product=None, + imaging_method='tclean', + extra_ext='', ): + """ + Make the file name dictionary for all postprocess files. This + will give the a big dictionary of names where one can look up + each type of file (e.g., primary beam corrected, single dish + aligned, etc.) given some target, config, and product. 
This + routine has a lot of hard-coded knowledge about our + postprocessing conventions. + """ + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Error checking + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + if target is None: + logger.error("Need a target.") + return () + + if product is None: + logger.error("Need a product.") + return () + + if config is None: + logger.error("Need a config.") + return () + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Initialize + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + fname_dict = {} + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Original files + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + # Original cube + + tag = 'orig' + + if imaging_method == 'tclean': + casaext = '.image' + elif imaging_method == 'sdintimaging': + casaext = '.joint.cube.image' + else: + logger.error('imaging_method %s not recognised' % imaging_method) + raise Exception('imaging_method %s not recognised' % imaging_method) + + orig_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=None, + casa=True, + casaext=casaext) + fname_dict[tag] = orig_file + + # Original primary beam file + + if imaging_method == 'tclean': + casaext = '.pb' + elif imaging_method == 'sdintimaging': + casaext = '.joint.cube.pb' + + tag = 'pb' + pb_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext=None, + casa=True, + casaext=casaext) + fname_dict[tag] = pb_file + + # Original single dish file (note that this comes with a + # directory) + + has_sd = self._kh.has_singledish(target=target, product=product) + tag = 'orig_sd' + if has_sd: + orig_sd_file = self._kh.get_sd_filename( + target=target, product=product) + fname_dict[tag] = orig_sd_file + else: + fname_dict[tag] = '' - # inherit template class - handlerTemplate.HandlerTemplate.__init__(self, key_handler = key_handler, dry_run = dry_run) - self.raise_exception_mosaic_part_missing = 
raise_exception_mosaic_part_missing + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Processed files (apply the extra_ext tag here) + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% -#region File name routines + # Primary beam corrected file + + tag = 'pbcorr' + pbcorr_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='pbcorr' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = pbcorr_file + + # Files with round beams + + tag = 'round' + round_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='round' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = round_file + + tag = 'pbcorr_round' + pbcorr_round_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='pbcorr_round' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = pbcorr_round_file + + # Weight file for use in linear mosaicking + + tag = 'weight' + weight_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='weight' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = weight_file + + tag = 'weight_aligned' + weight_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='weight_aligned' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = weight_file + + # Common resolution parts for mosaic + + tag = 'linmos_commonres' + commonres_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='linmos_commonres' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = commonres_file + + # Aligned parts for mosaic + + tag = 'linmos_aligned' + aligned_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='linmos_aligned' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = aligned_file + + # Imported single dish file aligned to the 
interfometer data + + tag = 'prepped_sd' + prepped_sd_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='singledish' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = prepped_sd_file - ########################################### - # Defined file names for various products # - ########################################### + # Singledish weight for use in linear mosaicking - def _fname_dict( - self, - target=None, - config=None, - product=None, - imaging_method='tclean', - extra_ext='', + tag = 'sd_weight' + sd_weight_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='singledish_weight' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = sd_weight_file + + # Singledish data aliged to a common grid for mosaicking + + tag = 'sd_aligned' + sd_align_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='singledish_aligned' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = sd_align_file + + # Singledish weight for use in linear mosaicking now on a + # common astrometric grid + + tag = 'sd_weight_aligned' + sd_weight_aligned_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='singledish_weight_aligned' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = sd_weight_aligned_file + + # Compressed files with edges trimmed off and smallest + # reasonable pixel size. 
+ + tag = 'trimmed' + trimmed_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='trimmed' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = trimmed_file + + tag = 'pbcorr_trimmed' + pbcorr_trimmed_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='pbcorr_trimmed' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = pbcorr_trimmed_file + + tag = 'trimmed_pb' + trimmed_pb_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='trimmed' + extra_ext, + casa=True, + casaext='.pb') + fname_dict[tag] = trimmed_pb_file + + # Files converted to Kelvin, including FITS output files + + tag = 'trimmed_k' + trimmed_k_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='trimmed_k' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = trimmed_k_file + + tag = 'trimmed_k_fits' + trimmed_k_fits = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='trimmed_k' + extra_ext, + casa=False) + fname_dict[tag] = trimmed_k_fits + + tag = 'pbcorr_trimmed_k' + pbcorr_trimmed_k_file = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='pbcorr_trimmed_k' + extra_ext, + casa=True, + casaext='.image') + fname_dict[tag] = pbcorr_trimmed_k_file + + tag = 'pbcorr_trimmed_k_fits' + pbcorr_trimmed_k_fits = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='pbcorr_trimmed_k' + extra_ext, + casa=False) + fname_dict[tag] = pbcorr_trimmed_k_fits + + tag = 'trimmed_pb_fits' + trimmed_pb_fits = utilsFilenames.get_cube_filename( + target=target, config=config, product=product, + ext='trimmed_pb' + extra_ext, + casa=False) + fname_dict[tag] = trimmed_pb_fits + + # Return + + return (fname_dict) + + # endregion + + # region "Tasks" : Individual postprocessing steps + + def 
task_stage_interf_data( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + extra_ext_in='', + extra_ext_out='', + check_files=True, + trim_coarse_beam_edge_channels=False, ): - """ - Make the file name dictionary for all postprocess files. This - will give the a big dictionary of names where one can look up - each type of file (e.g., primary beam corrected, single dish - aligned, etc.) given some target, config, and product. This - routine has a lot of hard-coded knowledge about our - postprocessing conventions. - """ + """ + For one target, product, config combination copy the + interferometric cube and primary beam file to the working + postprocessing directory. + """ + + # Generate file names + + indir = self._kh.get_imaging_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) + + # Copy the primary beam and the interferometric imaging + + for this_tag in ['orig', 'pb']: + + infile = fname_dict_in[this_tag] + outfile = fname_dict_out[this_tag] + + # Check input file existence + if check_files: + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + indir + infile) + continue + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Staging data for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + # logger.info("Using ccr.copy_dropdeg.") + logger.info("Staging " + outfile) + + # Move the cubes to the postprocess directory, trimming along the way + # (though not rebinning) + if not self._dry_run: + os.system('rm -rf ' + outdir + outfile) + os.system('rm -rf ' + outdir + outfile + ".temp") + 
os.system('rm -rf ' + outdir + outfile + ".temp_deg") + # os.system('cp -r ' + indir + infile + ' ' + outdir + outfile) + + ccr.trim_cube( + infile=indir + infile, + outfile=outdir + outfile, + overwrite=True, + inplace=False, + pad=1, + rebin=False, + ) + # ccr.copy_dropdeg( + # infile=indir+infile, + # outfile=outdir+outfile, + # overwrite=True) + + # in case of merged datasets with non-identical frequency setups imaged with per-plane beam, + # some edge channels will have much coarser beam, we trim these edge channels here. + if trim_coarse_beam_edge_channels: + ccr.trim_coarse_beam_edge_channels( + infile=outdir + fname_dict_out['orig'], + inpbfile=outdir + fname_dict_out['pb'], + inplace=True, + ) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Error checking - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - if target is None: - logger.error("Need a target.") - return() - - if product is None: - logger.error("Need a product.") - return() - - if config is None: - logger.error("Need a config.") - return() - - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Initialize - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - fname_dict = {} - - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Original files - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - # Original cube - - tag = 'orig' - - if imaging_method == 'tclean': - casaext = '.image' - elif imaging_method == 'sdintimaging': - casaext = '.joint.cube.image' - else: - logger.error('imaging_method %s not recognised' % imaging_method) - raise Exception('imaging_method %s not recognised' % imaging_method) - - orig_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = None, - casa = True, - casaext = casaext) - fname_dict[tag] = orig_file - - # Original primary beam file - - if imaging_method == 'tclean': - casaext = '.pb' - elif imaging_method == 'sdintimaging': - casaext = '.joint.cube.pb' - - tag = 'pb' - pb_file = 
utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = None, - casa = True, - casaext = casaext) - fname_dict[tag] = pb_file - - # Original single dish file (note that this comes with a - # directory) - - has_sd = self._kh.has_singledish(target=target, product=product) - tag = 'orig_sd' - if has_sd: - orig_sd_file = self._kh.get_sd_filename( - target = target, product = product) - fname_dict[tag] = orig_sd_file - else: - fname_dict[tag] = '' - - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Processed files (apply the extra_ext tag here) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - - # Primary beam corrected file - - tag = 'pbcorr' - pbcorr_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'pbcorr'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = pbcorr_file - - # Files with round beams - - tag = 'round' - round_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'round'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = round_file - - tag = 'pbcorr_round' - pbcorr_round_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'pbcorr_round'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = pbcorr_round_file - - # Weight file for use in linear mosaicking - - tag = 'weight' - weight_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'weight'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = weight_file - - tag = 'weight_aligned' - weight_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'weight_aligned'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = weight_file - - # Common resolution parts for mosaic - - tag = 'linmos_commonres' - commonres_file = 
utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'linmos_commonres'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = commonres_file - - # Aligned parts for mosaic - - tag = 'linmos_aligned' - aligned_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'linmos_aligned'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = aligned_file - - # Imported single dish file aligned to the interfometer data - - tag = 'prepped_sd' - prepped_sd_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'singledish'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = prepped_sd_file - - # Singledish weight for use in linear mosaicking - - tag = 'sd_weight' - sd_weight_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'singledish_weight'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = sd_weight_file - - # Singledish data aliged to a common grid for mosaicking - - tag = 'sd_aligned' - sd_align_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'singledish_aligned'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = sd_align_file - - # Singledish weight for use in linear mosaicking now on a - # common astrometric grid - - tag = 'sd_weight_aligned' - sd_weight_aligned_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'singledish_weight_aligned'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = sd_weight_aligned_file - - # Compressed files with edges trimmed off and smallest - # reasonable pixel size. 
- - tag = 'trimmed' - trimmed_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'trimmed'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = trimmed_file - - tag = 'pbcorr_trimmed' - pbcorr_trimmed_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'pbcorr_trimmed'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = pbcorr_trimmed_file - - tag = 'trimmed_pb' - trimmed_pb_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'trimmed'+extra_ext, - casa = True, - casaext = '.pb') - fname_dict[tag] = trimmed_pb_file - - # Files converted to Kelvin, including FITS output files - - tag = 'trimmed_k' - trimmed_k_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'trimmed_k'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = trimmed_k_file - - tag = 'trimmed_k_fits' - trimmed_k_fits = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'trimmed_k'+extra_ext, - casa = False) - fname_dict[tag] = trimmed_k_fits - - tag = 'pbcorr_trimmed_k' - pbcorr_trimmed_k_file = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'pbcorr_trimmed_k'+extra_ext, - casa = True, - casaext = '.image') - fname_dict[tag] = pbcorr_trimmed_k_file - - tag = 'pbcorr_trimmed_k_fits' - pbcorr_trimmed_k_fits = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'pbcorr_trimmed_k'+extra_ext, - casa = False) - fname_dict[tag] = pbcorr_trimmed_k_fits - - tag = 'trimmed_pb_fits' - trimmed_pb_fits = utilsFilenames.get_cube_filename( - target = target, config = config, product = product, - ext = 'trimmed_pb'+extra_ext, - casa = False) - fname_dict[tag] = trimmed_pb_fits - - # Return - - return(fname_dict) - -#endregion - 
-#region "Tasks" : Individual postprocessing steps - - def task_stage_interf_data( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - trim_coarse_beam_edge_channels = False, + return () + + def task_remove_degenerate_axes( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + extra_ext='', + check_files=True, ): - """ - For one target, product, config combination copy the - interferometric cube and primary beam file to the working - postprocessing directory. - """ + """ + Remove + degenerate axes for target, product, config combination in postprocessing directory. + """ + + # Generate file names + + file_dir = self._kh.get_postprocess_dir_for_target(target) + fname_dict = self._fname_dict(target=target, config=config, product=product, extra_ext=extra_ext, + imaging_method=imaging_method) - # Generate file names + # Copy the primary beam and the interferometric imaging + + logger.info("Dropping degenerate axes from postprocess image/pb files.") + + for this_tag in ['orig', 'pb', 'pbcorr', 'pbcorr_round']: + + file_name = fname_dict[this_tag] + + # Check input file existence + if check_files: + if not (os.path.isdir(file_dir + file_name)): + logger.warning("Missing " + file_dir + file_name) + continue + + if not self._dry_run: + ccr.copy_dropdeg(file_dir + file_name, file_dir + file_name + '_nodeg', overwrite=True) + + os.system('rm -rf ' + file_dir + file_name) + os.system('cp -r ' + file_dir + file_name + '_nodeg ' + file_dir + file_name) + os.system('rm -rf ' + file_dir + file_name + '_nodeg') + + return () + + def task_pbcorr( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + in_tag='orig', + out_tag='pbcorr', + extra_ext_in='', + extra_ext_out='', + check_files=True, + ): + """ + For one target, product, config combination primary beam + correct the interferometer data. 
+ """ - indir = self._kh.get_imaging_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) + # Generate file names - # Copy the primary beam and the interferometric imaging + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - for this_tag in ['orig', 'pb']: + # Pull in the pblimit for setting the cutoff + recipe_list = self._kh.get_imaging_recipes(config=config, product=product) + clean_call = CleanCall(recipe_list) + cutoff = clean_call.get_param('pblimit') - infile = fname_dict_in[this_tag] - outfile = fname_dict_out[this_tag] + infile = fname_dict_in[in_tag] + outfile = fname_dict_out[out_tag] + pbfile = fname_dict_in['pb'] # Check input file existence + if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+indir+infile) - continue + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + indir + infile) + return () + if not (os.path.isdir(indir + pbfile)): + logger.warning("Missing " + indir + pbfile) + return () + + # Apply the primary beam correction to the data. 
logger.info("") logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Staging data for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) + logger.info("Primary beam correction for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") logger.info("") - # logger.info("Using ccr.copy_dropdeg.") - logger.info("Staging "+outfile) - # Move the cubes to the postprocess directory, trimming along the way - # (though not rebinning) - if (not self._dry_run) and casa_enabled: - os.system('rm -rf ' + outdir + outfile) - os.system('rm -rf ' + outdir + outfile + ".temp") - os.system('rm -rf ' + outdir + outfile + ".temp_deg") - # os.system('cp -r ' + indir + infile + ' ' + outdir + outfile) + logger.info("Using ccr.primary_beam_correct") + logger.info("Correcting to " + outfile) + logger.info("Correcting from " + infile) + logger.info("Correcting using " + pbfile) - ccr.trim_cube( + if not self._dry_run: + ccr.primary_beam_correct( infile=indir + infile, outfile=outdir + outfile, - overwrite=True, - inplace=False, - pad=1, - rebin=False, - ) - # ccr.copy_dropdeg( - # infile=indir+infile, - # outfile=outdir+outfile, - # overwrite=True) - - # in case of merged datasets with non-identical frequency setups imaged with per-plane beam, - # some edge channels will have much coarser beam, we trim these edge channels here. - if trim_coarse_beam_edge_channels: - ccr.trim_coarse_beam_edge_channels( - infile=outdir+fname_dict_out['orig'], - inpbfile=outdir+fname_dict_out['pb'], - inplace=True, - ) + pbfile=indir + pbfile, + cutoff=cutoff, + overwrite=True) - return() - - def task_remove_degenerate_axes( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - extra_ext = '', - check_files = True, - ): - """ - Remove - degenerate axes for target, product, config combination in postprocessing directory. 
- """ + return () + + def task_round_beam( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + in_tag='pbcorr', + out_tag='pbcorr_round', + extra_ext_in='', + extra_ext_out='', + force_beam_as=None, + check_files=True, + ): + """ + For one target, product, config combination, convolve the cube + to have a round beam. Note that via the force_beam_as keyword + this task can also be used to convolve data to a fixed (round) + angular resolution. + """ - # Generate file names + # Generate file names - file_dir = self._kh.get_postprocess_dir_for_target(target) - fname_dict = self._fname_dict(target=target, config=config, product=product, extra_ext=extra_ext, - imaging_method=imaging_method) + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - # Copy the primary beam and the interferometric imaging + infile = fname_dict_in[in_tag] + outfile = fname_dict_out[out_tag] - logger.info("Dropping degenerate axes from postprocess image/pb files.") + # Check input file existence - for this_tag in ['orig', 'pb', 'pbcorr', 'pbcorr_round']: + if check_files: + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + infile) + return () - file_name = fname_dict[this_tag] + # Convolve the data to have a round beam. 
- # Check input file existence - if check_files: - if not (os.path.isdir(file_dir+file_name)): - logger.warning("Missing "+file_dir+file_name) - continue + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Convolving to a round beam for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + logger.info("Using ccr.convolve_to_round_beam") + logger.info("Convolving from " + infile) + logger.info("Convolving to " + outfile) + if force_beam_as is not None: + logger.info("Forcing beam to " + str(force_beam_as)) if not self._dry_run: - ccr.copy_dropdeg(file_dir + file_name, file_dir + file_name + '_nodeg', overwrite=True) - - os.system('rm -rf ' + file_dir + file_name) - os.system('cp -r ' + file_dir + file_name + '_nodeg ' + file_dir + file_name) - os.system('rm -rf ' + file_dir + file_name + '_nodeg') - - return() - - def task_pbcorr( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - in_tag = 'orig', - out_tag = 'pbcorr', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, product, config combination primary beam - correct the interferometer data. 
- """ + ccr.convolve_to_round_beam( + infile=indir + infile, + outfile=outdir + outfile, + force_beam=force_beam_as, + overwrite=True) - # Generate file names - - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - - # Pull in the pblimit for setting the cutoff - recipe_list = self._kh.get_imaging_recipes(config=config, product=product) - clean_call = CleanCall(recipe_list) - cutoff = clean_call.get_param('pblimit') - - infile = fname_dict_in[in_tag] - outfile = fname_dict_out[out_tag] - pbfile = fname_dict_in['pb'] - - # Check input file existence - - if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+indir+infile) - return() - if not (os.path.isdir(indir+pbfile)): - logger.warning("Missing "+indir+pbfile) - return() - - # Apply the primary beam correction to the data. 
- - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Primary beam correction for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using ccr.primary_beam_correct") - logger.info("Correcting to "+outfile) - logger.info("Correcting from "+infile) - logger.info("Correcting using "+pbfile) - - if (not self._dry_run) and casa_enabled: - ccr.primary_beam_correct( - infile=indir+infile, - outfile=outdir+outfile, - pbfile=indir+pbfile, - cutoff=cutoff, - overwrite=True) - - return() - - def task_round_beam( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - in_tag = 'pbcorr', - out_tag = 'pbcorr_round', - extra_ext_in = '', - extra_ext_out = '', - force_beam_as = None, - check_files = True, + return () + + def task_stage_singledish( + self, + target=None, + product=None, + config=None, + template_tag='pbcorr_round', + out_tag='prepped_sd', + extra_ext_in='', + extra_ext_out='', + check_files=True, ): - """ - For one target, product, config combination, convolve the cube - to have a round beam. Note that via the force_beam_as keyword - this task can also be used to convolve data to a fixed (round) - angular resolution. - """ + """ + For one target, product, config combination, copy the single + dish data and align it to the interferometric grid. 
+ """ - # Generate file names - - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - - infile = fname_dict_in[in_tag] - outfile = fname_dict_out[out_tag] - - # Check input file existence - - if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+infile) - return() - - # Convolve the data to have a round beam. - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Convolving to a round beam for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using ccr.convolve_to_round_beam") - logger.info("Convolving from "+infile) - logger.info("Convolving to "+outfile) - if force_beam_as is not None: - logger.info("Forcing beam to "+str(force_beam_as)) - - if (not self._dry_run) and casa_enabled: - ccr.convolve_to_round_beam( - infile=indir+infile, - outfile=outdir+outfile, - force_beam=force_beam_as, - overwrite=True) - - return() - - def task_stage_singledish( - self, - target = None, - product = None, - config = None, - template_tag = 'pbcorr_round', - out_tag = 'prepped_sd', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, product, config combination, copy the single - dish data and align it to the interferometric grid. 
- """ + # Generate file names - # Generate file names - - indir = '' - outdir = self._kh.get_postprocess_dir_for_target(target) - tempdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out) - - template = fname_dict_in[template_tag] - infile = fname_dict_in['orig_sd'] - outfile = fname_dict_out[out_tag] - - # Check input file existence - - if check_files: - if (not (os.path.isdir(indir+infile))) and \ - (not (os.path.isfile(indir+infile))): - logger.warning("Missing "+infile) - return() - if not (os.path.isdir(tempdir+template)): - logger.warning("Missing "+tempdir+template) - return() - - # Stage the singledish data for feathering - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Preparing single dish data for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using cfr.prep_sd_for_feather.") - logger.info("Prepping "+outfile) - logger.info("Original file "+infile) - logger.info("Using interferometric template "+template) - - if (not self._dry_run) and casa_enabled: - cfr.prep_sd_for_feather( - sdfile_in=indir+infile, - sdfile_out=outdir+outfile, - interf_file=tempdir+template, - do_import=True, - do_dropdeg=True, - do_align=True, - do_checkunits=True, - overwrite=True) - - return() - - def task_make_interf_weight( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - image_tag = 'pbcorr_round', - in_tag = 'pb', - input_type = 'pb', - scale_by_noise = True, - out_tag = 'weight', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, product, config combination, make a 'weight' - image for use in linearly mosaicking the cube with other, - 
overlapping cubes. This task targets interferometric dish - data. - """ + indir = '' + outdir = self._kh.get_postprocess_dir_for_target(target) + tempdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out) - # Generate file names - - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - - image_file = fname_dict_in[image_tag] - infile = fname_dict_in[in_tag] - outfile = fname_dict_out[out_tag] - - # Check input file existence - - if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+infile) - return() - if not (os.path.isdir(indir+image_file)): - logger.warning("Missing "+image_file) - return() - - # Create a weight image for use linear mosaicking targets that - # are part of a linear mosaic - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Making weight file for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - - logger.info("Using cmr.generate_weight_file.") - logger.info("Making weight file "+outfile) - logger.info("Based off of primary beam file "+infile) - logger.info("Measuring noise from file "+image_file) - - if (not self._dry_run) and casa_enabled: - cmr.generate_weight_file( - image_file = indir+image_file, - input_file = indir+infile, - input_type = input_type, - outfile = indir + outfile, - scale_by_noise = scale_by_noise, - overwrite=True) - - return() - - def 
task_make_singledish_weight( - self, - target = None, - product = None, - config = None, - image_tag = 'prepped_sd', - out_tag = 'sd_weight', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, product, config combination, make a 'weight' - image for use in linearly mosaicking the cube with other, - overlapping cubes. This task targets single dish data. - """ + template = fname_dict_in[template_tag] + infile = fname_dict_in['orig_sd'] + outfile = fname_dict_out[out_tag] + + # Check input file existence - # Generate file names - - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out) - - image_file = fname_dict_in[image_tag] - outfile = fname_dict_out[out_tag] - - # Check input file existence - - if check_files: - if not (os.path.isdir(indir+image_file)): - logger.warning("Missing "+image_file) - return() - - # Make a weight file for single dish targets that - # are part of a linear mosaic - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("Making single dish weight file for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") - logger.info("") - - logger.info("Using cmr.generate_weight_file.") - logger.info("Making weight file "+outfile) - logger.info("Measuring noise from file "+image_file) - - if (not self._dry_run) and casa_enabled: - cmr.generate_weight_file( - image_file = indir+image_file, - input_value = 1.0, - input_type = 'weight', - outfile = indir + outfile, - scale_by_noise = True, - overwrite=True) - - return() - - def task_feather( - self, - target = None, - product = None, - config = None, - interf_tag = 'pbcorr_round', - sd_tag = 
'prepped_sd', - out_tag = 'pbcorr_round', - extra_ext_in = '', - extra_ext_out = '', - apodize = False, - apod_ext = 'pb', - copy_weights = True, - check_files = True, + if check_files: + if (not (os.path.isdir(indir + infile))) and \ + (not (os.path.isfile(indir + infile))): + logger.warning("Missing " + infile) + return () + if not (os.path.isdir(tempdir + template)): + logger.warning("Missing " + tempdir + template) + return () + + # Stage the singledish data for feathering + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Preparing single dish data for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + logger.info("Using cfr.prep_sd_for_feather.") + logger.info("Prepping " + outfile) + logger.info("Original file " + infile) + logger.info("Using interferometric template " + template) + + if not self._dry_run: + cfr.prep_sd_for_feather( + sdfile_in=indir + infile, + sdfile_out=outdir + outfile, + interf_file=tempdir + template, + do_import=True, + do_dropdeg=True, + do_align=True, + do_checkunits=True, + overwrite=True) + + return () + + def task_make_interf_weight( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + image_tag='pbcorr_round', + in_tag='pb', + input_type='pb', + scale_by_noise=True, + out_tag='weight', + extra_ext_in='', + extra_ext_out='', + check_files=True, ): - """ - For one target, product, config combination, feather together - a single dish and interferometric data set. Note that - apodization is exposed as an option. Also note that the - configuration of the input and output will differ (an - interferometric configuration comes in, a feather - configuration comes out). Optionally, propagate the weights - from the interferometric side to become the weights for the - new feathered data. 
- """ + """ + For one target, product, config combination, make a 'weight' + image for use in linearly mosaicking the cube with other, + overlapping cubes. This task targets interferometric dish + data. + """ + + # Generate file names + + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) + + image_file = fname_dict_in[image_tag] + infile = fname_dict_in[in_tag] + outfile = fname_dict_out[out_tag] - # Generate file names + # Check input file existence - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, - extra_ext=extra_ext_in) + if check_files: + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + infile) + return () + if not (os.path.isdir(indir + image_file)): + logger.warning("Missing " + image_file) + return () - # Note that feather changes the config + # Create a weight image for use linear mosaicking targets that + # are part of a linear mosaic - feather_config = self._kh.get_feather_config_for_interf_config( - interf_config=config) + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Making weight file for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - fname_dict_out = self._fname_dict( - target=target, config=feather_config, product=product, - extra_ext=extra_ext_out) + logger.info("Using cmr.generate_weight_file.") + logger.info("Making weight file " + outfile) + logger.info("Based off of primary beam file " + infile) + logger.info("Measuring 
noise from file " + image_file) - interf_file = fname_dict_in[interf_tag] - sd_file = fname_dict_in[sd_tag] - if len(fname_dict_out) == 0: - logger.info("No feather config found for:") - logger.info(str(target) + " , "+str(product)+" , "+str(config)) - return() - outfile = fname_dict_out[out_tag] + if not self._dry_run: + cmr.generate_weight_file( + image_file=indir + image_file, + input_file=indir + infile, + input_type=input_type, + outfile=indir + outfile, + scale_by_noise=scale_by_noise, + overwrite=True) - # Error checking + return () + + def task_make_singledish_weight( + self, + target=None, + product=None, + config=None, + image_tag='prepped_sd', + out_tag='sd_weight', + extra_ext_in='', + extra_ext_out='', + check_files=True, + ): + """ + For one target, product, config combination, make a 'weight' + image for use in linearly mosaicking the cube with other, + overlapping cubes. This task targets single dish data. + """ - # Check input file existence + # Generate file names - # Feather the single dish and interferometer data + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out) - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Feathering interferometer and single dish data for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") + image_file = fname_dict_in[image_tag] + outfile = fname_dict_out[out_tag] - logger.info("Using cfr.feather_two_cubes.") - logger.info("Feathering "+outfile) - logger.info("Feathering interferometric data "+interf_file) - logger.info("Feathering single dish data "+sd_file) + # Check input file 
existence - # Feather has a couple of algorithmic choices - # associated with it. Run the method that the - # user has selected. + if check_files: + if not (os.path.isdir(indir + image_file)): + logger.warning("Missing " + image_file) + return () - if apodize: + # Make a weight file for single dish targets that + # are part of a linear mosaic - apod_file = fname_dict_in[apod_ext] + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("Making single dish weight file for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&") + logger.info("") - logger.info("Apodizing using file "+apod_file) + logger.info("Using cmr.generate_weight_file.") + logger.info("Making weight file " + outfile) + logger.info("Measuring noise from file " + image_file) if not self._dry_run: - cfr.feather_two_cubes( - interf_file=indir+interf_file, - sd_file=indir+sd_file, - out_file=outdir+outfile, - do_blank=True, - do_apodize=True, - apod_file=indir+apod_file, - apod_cutoff=0.0, + cmr.generate_weight_file( + image_file=indir + image_file, + input_value=1.0, + input_type='weight', + outfile=indir + outfile, + scale_by_noise=True, overwrite=True) - else: - - if (not self._dry_run) and casa_enabled: - cfr.feather_two_cubes( - interf_file=indir+interf_file, - sd_file=indir+sd_file, - out_file=outdir+outfile, - do_blank=True, - do_apodize=False, - apod_file=None, - apod_cutoff=-1.0, - overwrite=True) + return () + + def task_feather( + self, + target=None, + product=None, + config=None, + interf_tag='pbcorr_round', + sd_tag='prepped_sd', + out_tag='pbcorr_round', + extra_ext_in='', + extra_ext_out='', + apodize=False, + apod_ext='pb', + copy_weights=True, + check_files=True, + ): + """ + For one target, product, config combination, feather together + a single dish and interferometric data set. Note that + apodization is exposed as an option. 
Also note that the + configuration of the input and output will differ (an + interferometric configuration comes in, a feather + configuration comes out). Optionally, propagate the weights + from the interferometric side to become the weights for the + new feathered data. + """ + + # Generate file names + + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, + extra_ext=extra_ext_in) - if copy_weights: + # Note that feather changes the config - interf_weight_exists = False - interf_weight_file = fname_dict_in['weight'] - if os.path.isdir(indir+interf_weight_file): - interf_weight_exists = True - else: - logger.info("Interferometric weight file not found "+interf_weight_file) + feather_config = self._kh.get_feather_config_for_interf_config( + interf_config=config) - if interf_weight_exists: - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Copying weights for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") + fname_dict_out = self._fname_dict( + target=target, config=feather_config, product=product, + extra_ext=extra_ext_out) - out_weight_file=fname_dict_out['weight'] + interf_file = fname_dict_in[interf_tag] + sd_file = fname_dict_in[sd_tag] + if len(fname_dict_out) == 0: + logger.info("No feather config found for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + return () + outfile = fname_dict_out[out_tag] - logger.info("Copying from "+interf_weight_file) - logger.info("Copying to "+out_weight_file) - if (not self._dry_run) and casa_enabled: - ccr.copy_dropdeg(infile=indir+interf_weight_file, - outfile=outdir+out_weight_file, - overwrite=True) - return() + # Error checking - def task_rename_sdintimaging(self, - target=None, - 
product=None, - config=None, - imaging_method='sdintimaging'): + # Check input file existence - if target is None: - logger.warning('Missing target') - return - if product is None: - logger.warning('Missing product') - return - if config is None: - logger.warning('Missing config') - return + # Feather the single dish and interferometer data - if imaging_method != 'sdintimaging': - logger.warning('This should only be run for sdintimaging') - return + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Feathering interferometer and single dish data for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") - fname_dict_in = self._fname_dict(target=target, product=product, config=config, - imaging_method=imaging_method) + logger.info("Using cfr.feather_two_cubes.") + logger.info("Feathering " + outfile) + logger.info("Feathering interferometric data " + interf_file) + logger.info("Feathering single dish data " + sd_file) - imaging_dir = self._kh.get_imaging_dir_for_target(target) - using_sdint = os.path.isdir(imaging_dir + fname_dict_in['orig']) - is_mosaic = self._kh.is_target_linmos(target) + # Feather has a couple of algorithmic choices + # associated with it. Run the method that the + # user has selected. 
- # If not a mosaic and we're not sdintimaging, skip + if apodize: - if not using_sdint and not is_mosaic: - return + apod_file = fname_dict_in[apod_ext] - if is_mosaic: + logger.info("Apodizing using file " + apod_file) - # In the case where we have a mosaic, look for the telltale sdint files + if not self._dry_run: + cfr.feather_two_cubes( + interf_file=indir + interf_file, + sd_file=indir + sd_file, + out_file=outdir + outfile, + do_blank=True, + do_apodize=True, + apod_file=indir + apod_file, + apod_cutoff=0.0, + overwrite=True) - mosaic_parts = self._kh.get_parts_for_linmos(target) - for mosaic_part in mosaic_parts: - fname_dict_mosaic = self._fname_dict( - target=mosaic_part, product=product, config=config, - imaging_method=imaging_method) - imaging_dir = self._kh.get_imaging_dir_for_target(mosaic_part) - using_sdint = os.path.isdir(imaging_dir + fname_dict_mosaic['orig']) - if using_sdint: - break - if not using_sdint: + else: + + if not self._dry_run: + cfr.feather_two_cubes( + interf_file=indir + interf_file, + sd_file=indir + sd_file, + out_file=outdir + outfile, + do_blank=True, + do_apodize=False, + apod_file=None, + apod_cutoff=-1.0, + overwrite=True) + + if copy_weights: + + interf_weight_exists = False + interf_weight_file = fname_dict_in['weight'] + if os.path.isdir(indir + interf_weight_file): + interf_weight_exists = True + else: + logger.info("Interferometric weight file not found " + interf_weight_file) + + if interf_weight_exists: + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Copying weights for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + out_weight_file = fname_dict_out['weight'] + + logger.info("Copying from " + interf_weight_file) + logger.info("Copying to " + out_weight_file) + if not self._dry_run: + ccr.copy_dropdeg(infile=indir + interf_weight_file, + 
outfile=outdir + out_weight_file, + overwrite=True) + return () + + def task_rename_sdintimaging(self, + target=None, + product=None, + config=None, + imaging_method='sdintimaging'): + + if target is None: + logger.warning('Missing target') + return + if product is None: + logger.warning('Missing product') + return + if config is None: + logger.warning('Missing config') return - outdir = self._kh.get_postprocess_dir_for_target(target) - feather_config = self._kh.get_feather_config_for_interf_config(interf_config=config) - fname_dict_out = self._fname_dict(target=target, product=product, config=feather_config) - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Renaming sdintimaging outputs for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - for key in fname_dict_in.keys(): - item = fname_dict_in[key] - - # Make sure we don't just delete the whole postprocess folder - if item == '': - continue - - file_name = outdir + item - if os.path.exists(file_name): - new_file_name = outdir + fname_dict_out[key] - command = 'mv -f %s %s' % (file_name, new_file_name) - os.system('rm -rf %s' % new_file_name) - os.system(command) - - return - - def task_compress( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - in_tag = 'pbcorr_round', - out_tag = 'pbcorr_trimmed', - do_trimrind = True, - do_pb_too = True, - in_pb_tag = 'pb', - out_pb_tag = 'pb_trimmed', - extra_ext_in = '', - extra_ext_out = '', - check_files = True - ): - """ - For one target, product, config combination, compress the cube - to the smallest reasonable volume. Also align the primary beam - file out onto this grid. 
- """ + if imaging_method != 'sdintimaging': + logger.warning('This should only be run for sdintimaging') + return - # Generate file names + fname_dict_in = self._fname_dict(target=target, product=product, config=config, + imaging_method=imaging_method) - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) + imaging_dir = self._kh.get_imaging_dir_for_target(target) + using_sdint = os.path.isdir(imaging_dir + fname_dict_in['orig']) + is_mosaic = self._kh.is_target_linmos(target) - infile = fname_dict_in['pbcorr_round'] - outfile = fname_dict_out['pbcorr_trimmed'] + # If not a mosaic and we're not sdintimaging, skip - infile_pb = fname_dict_in['pb'] - outfile_pb = fname_dict_out['trimmed_pb'] + if not using_sdint and not is_mosaic: + return - # Check input file existence + if is_mosaic: - if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+infile) - return() + # In the case where we have a mosaic, look for the telltale sdint files - # Compress, reducing cube volume. 
+ mosaic_parts = self._kh.get_parts_for_linmos(target) + for mosaic_part in mosaic_parts: + fname_dict_mosaic = self._fname_dict( + target=mosaic_part, product=product, config=config, + imaging_method=imaging_method) + imaging_dir = self._kh.get_imaging_dir_for_target(mosaic_part) + using_sdint = os.path.isdir(imaging_dir + fname_dict_mosaic['orig']) + if using_sdint: + break + if not using_sdint: + return - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Trimming cube for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") + outdir = self._kh.get_postprocess_dir_for_target(target) + feather_config = self._kh.get_feather_config_for_interf_config(interf_config=config) + fname_dict_out = self._fname_dict(target=target, product=product, config=feather_config) - logger.info("Producing "+outfile+" using ccr.trim_cube.") - logger.info("Trimming from original file "+infile) + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Renaming sdintimaging outputs for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") - if (not self._dry_run) and casa_enabled: - ccr.trim_cube( - infile=indir+infile, - outfile=outdir+outfile, - overwrite=True, - inplace=False, - min_pixperbeam=3, - pad=1) + for key in fname_dict_in.keys(): + item = fname_dict_in[key] - if do_trimrind: - ccr.trim_rind( - infile=outdir+outfile, - inplace=True, - pixels=1) + # Make sure we don't just delete the whole postprocess folder + if item == '': + continue + file_name = outdir + item + if os.path.exists(file_name): + new_file_name = outdir + fname_dict_out[key] + command = 'mv -f %s %s' % (file_name, new_file_name) + os.system('rm -rf %s' % new_file_name) + os.system(command) - if do_pb_too is False: - return() + return - if check_files: - if not (os.path.isdir(indir+infile_pb)): - 
logger.warning("Missing "+infile_pb) - return() + def task_compress( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + in_tag='pbcorr_round', + out_tag='pbcorr_trimmed', + do_trimrind=True, + do_pb_too=True, + in_pb_tag='pb', + out_pb_tag='pb_trimmed', + extra_ext_in='', + extra_ext_out='', + check_files=True + ): + """ + For one target, product, config combination, compress the cube + to the smallest reasonable volume. Also align the primary beam + file out onto this grid. + """ - template = fname_dict_out['pbcorr_trimmed'] + # Generate file names - if check_files: - if not (os.path.isdir(outdir+template)): - logger.warning("Missing "+template) - return() + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - logger.info("Aligning primary beam image to new astrometry") - logger.info("Using ccr.align_to_target.") - logger.info("Aligning original file "+infile_pb) - logger.info("Aligning to produce output file "+outfile_pb) - logger.info("Aligning to template "+template) + infile = fname_dict_in['pbcorr_round'] + outfile = fname_dict_out['pbcorr_trimmed'] - if not self._dry_run: - ccr.align_to_target( - infile=indir+infile_pb, - outfile=outdir+outfile_pb, - template=outdir+template, - interpolation='cubic', - overwrite=True, - ) + infile_pb = fname_dict_in['pb'] + outfile_pb = fname_dict_out['trimmed_pb'] - return() - - def task_convert_units( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - in_tag = 'pbcorr_trimmed', - out_tag = 'pbcorr_trimmed_k', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, config, 
product combination convert the units - from Jy/beam to Kelvin. - """ + # Check input file existence - # Generate file names + if check_files: + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + infile) + return () - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) + # Compress, reducing cube volume. - infile = fname_dict_in[in_tag] - outfile = fname_dict_out[out_tag] + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Trimming cube for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") - # Check input file existence + logger.info("Producing " + outfile + " using ccr.trim_cube.") + logger.info("Trimming from original file " + infile) - if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+infile) - return() + if not self._dry_run: + ccr.trim_cube( + infile=indir + infile, + outfile=outdir + outfile, + overwrite=True, + inplace=False, + min_pixperbeam=3, + pad=1) - # Change units from Jy/beam to Kelvin. 
+ if do_trimrind: + ccr.trim_rind( + infile=outdir + outfile, + inplace=True, + pixels=1) - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Converting units for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") + if do_pb_too is False: + return () - logger.info("Using ccr.convert_jytok") - logger.info("Creating "+outfile) - logger.info("Converting from original file "+infile) + if check_files: + if not (os.path.isdir(indir + infile_pb)): + logger.warning("Missing " + infile_pb) + return () - if (not self._dry_run) and casa_enabled: - ccr.convert_jytok( - infile=indir+infile, - outfile=outdir+outfile, - overwrite=True, - inplace=False, - ) + template = fname_dict_out['pbcorr_trimmed'] - return() - - def task_export_to_fits( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - in_tag = 'pbcorr_trimmed_k', - out_tag = 'pbcorr_trimmed_k_fits', - do_pb_too = True, - in_pb_tag = 'trimmed_pb', - out_pb_tag = 'trimmed_pb_fits', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, config, product combination export to - FITS. Optionally also export the primary beam files. 
- """ + if check_files: + if not (os.path.isdir(outdir + template)): + logger.warning("Missing " + template) + return () - # Generate file names - - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) - fname_dict_in = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - - infile = fname_dict_in[in_tag] - outfile = fname_dict_out[out_tag] - - # Check input file existence - - if check_files: - if not (os.path.isdir(indir+infile)): - logger.warning("Missing "+infile) - return() - - # Export to FITS and clean up output - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Exporting data to FITS and cleaning up cubes for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using ccr.export_and_cleanup.") - logger.info("Export to "+outfile) - logger.info("Writing from input cube "+infile) - - if not self._dry_run: - ccr.export_and_cleanup( - infile=indir+infile, - outfile=outdir+outfile, - overwrite=True, - remove_cards=[], - add_cards={'OBJECT':target.upper()}, - add_history=[], - zap_history=True, - round_beam=True, - roundbeam_tol=0.01, - ) + logger.info("Aligning primary beam image to new astrometry") + logger.info("Using ccr.align_to_target.") + logger.info("Aligning original file " + infile_pb) + logger.info("Aligning to produce output file " + outfile_pb) + logger.info("Aligning to template " + template) - if do_pb_too is False: - return() - - # Check input file existence - - infile_pb = fname_dict_in[in_pb_tag] - outfile_pb = fname_dict_out[out_pb_tag] - - if check_files: - if not (os.path.isdir(indir+infile_pb)): - 
logger.warning("Missing "+infile_pb) - return() - - logger.info("Writing from primary beam "+infile_pb) - logger.info("Writing output primary beam "+outfile_pb) - - if (not self._dry_run) and casa_enabled: - ccr.export_and_cleanup( - infile=indir+infile_pb, - outfile=outdir+outfile_pb, - overwrite=True, - remove_cards=[], - add_cards={'OBJECT':target.upper()}, - add_history=[], - zap_history=True, - round_beam=False, - roundbeam_tol=0.01, + if not self._dry_run: + ccr.align_to_target( + infile=indir + infile_pb, + outfile=outdir + outfile_pb, + template=outdir + template, + interpolation='cubic', + overwrite=True, ) - return() - - def task_convolve_parts_for_mosaic( - self, - target = None, - product = None, - config = None, - in_tag = 'pbcorr_round', - out_tag = 'linmos_commonres', - extra_ext_in = '', - extra_ext_out = '', - check_files = True, + return () + + def task_convert_units( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + in_tag='pbcorr_trimmed', + out_tag='pbcorr_trimmed_k', + extra_ext_in='', + extra_ext_out='', + check_files=True, ): - """ - For one target, config, product combination that is a linear - mosaic, convolve all of the parts of the mosaic to share a - common angular resolution, appropriate for gridding together - into a single image. - """ + """ + For one target, config, product combination convert the units + from Jy/beam to Kelvin. 
+ """ - # Generate file names + # Generate file names - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - mosaic_parts = self._kh.get_parts_for_linmos(target) + infile = fname_dict_in[in_tag] + outfile = fname_dict_out[out_tag] - infile_list = [] - outfile_list = [] + # Check input file existence - for this_part in mosaic_parts: + if check_files: + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + infile) + return () - this_part_dict_in = self._fname_dict( - target=this_part, config=config, product=product, - extra_ext=extra_ext_in, - ) + # Change units from Jy/beam to Kelvin. 
- this_part_dict_out = self._fname_dict( - target=this_part, config=config, product=product, - extra_ext=extra_ext_out, + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Converting units for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + logger.info("Using ccr.convert_jytok") + logger.info("Creating " + outfile) + logger.info("Converting from original file " + infile) + + if not self._dry_run: + ccr.convert_jytok( + infile=indir + infile, + outfile=outdir + outfile, + overwrite=True, + inplace=False, ) - infile = indir+this_part_dict_in[in_tag] - infile_exists = os.path.isdir(infile) + return () + + def task_export_to_fits( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + in_tag='pbcorr_trimmed_k', + out_tag='pbcorr_trimmed_k_fits', + do_pb_too=True, + in_pb_tag='trimmed_pb', + out_pb_tag='trimmed_pb_fits', + extra_ext_in='', + extra_ext_out='', + check_files=True, + ): + """ + For one target, config, product combination export to + FITS. Optionally also export the primary beam files. 
+ """ - outfile = outdir+this_part_dict_out[out_tag] + # Generate file names - if infile_exists: - infile_list.append(infile) - outfile_list.append(outfile) - else: - if self.raise_exception_mosaic_part_missing: - raise FileNotFoundError("Missing file "+infile) - else: - logger.warning("Missing file "+infile) - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Convolving for mosaic for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using cmr.common_res_for_mosaic.") - logger.info("Convolving "+target) - logger.info("Convolving original files "+str(infile_list)) - logger.info("Convolving to convolved output "+str(outfile_list)) - - # Allow overrides for the pixel padding (the - # number of pixels added to the greatest - # common beam for calculating the target - # resolution) and the target resolution. - - pixel_padding = 2.0 - target_res = None - - # TBD - check override dict for target - # resolution and (maybe?) pixel padding. 
- - if not self._dry_run: - cmr.common_res_for_mosaic( - infile_list = infile_list, - outfile_list = outfile_list, - do_convolve = True, - target_res = target_res, - pixel_padding = pixel_padding, - overwrite=True, - ) + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + fname_dict_in = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_in, imaging_method=imaging_method) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, extra_ext=extra_ext_out, imaging_method=imaging_method) - return() + infile = fname_dict_in[in_tag] + outfile = fname_dict_out[out_tag] + # Check input file existence - def task_align_for_mosaic( - self, - target = None, - product = None, - config = None, - in_tags = ['linmos_commonres', 'weight', 'prepped_sd', 'sd_weight'], - out_tags = ['linmos_aligned', 'weight_aligned', 'sd_aligned', 'sd_weight_aligned'], - extra_ext_in = '', - extra_ext_out = '', - check_files = True, - ): - """ - For one target, config, product combination that is a linear - mosaic, align all parts of the mosaic to a common astrometric - grid for combination into a single image. 
- """ + if check_files: + if not (os.path.isdir(indir + infile)): + logger.warning("Missing " + infile) + return () - # Map the input and output tags to one another in a dictionary + # Export to FITS and clean up output - if (type(in_tags) != type([])) or type(out_tags) != type([]): - logger.error("Input and output tag lists must be lists.") - return(None) + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Exporting data to FITS and cleaning up cubes for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") - if len(in_tags) != len(out_tags): - logger.error("Mismatch in input and output list tag list.") - return(None) + logger.info("Using ccr.export_and_cleanup.") + logger.info("Export to " + outfile) + logger.info("Writing from input cube " + infile) - out_tag_dict = {} - for ii in range(len(in_tags)): - out_tag_dict[in_tags[ii]] = out_tags[ii] + if not self._dry_run: + ccr.export_and_cleanup( + infile=indir + infile, + outfile=outdir + outfile, + overwrite=True, + remove_cards=[], + add_cards={'OBJECT': target.upper()}, + add_history=[], + zap_history=True, + round_beam=True, + roundbeam_tol=0.01, + ) - # Generate file names + if do_pb_too is False: + return () - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) + # Check input file existence - mosaic_parts = self._kh.get_parts_for_linmos(target) + infile_pb = fname_dict_in[in_pb_tag] + outfile_pb = fname_dict_out[out_pb_tag] - infile_list = [] - outfile_list = [] + if check_files: + if not (os.path.isdir(indir + infile_pb)): + logger.warning("Missing " + infile_pb) + return () - for this_part in mosaic_parts: + logger.info("Writing from primary beam " + infile_pb) + logger.info("Writing output primary beam " + outfile_pb) - this_part_dict_in = self._fname_dict( - target=this_part, 
config=config, product=product, - extra_ext=extra_ext_in, + if not self._dry_run: + ccr.export_and_cleanup( + infile=indir + infile_pb, + outfile=outdir + outfile_pb, + overwrite=True, + remove_cards=[], + add_cards={'OBJECT': target.upper()}, + add_history=[], + zap_history=True, + round_beam=False, + roundbeam_tol=0.01, ) - this_part_dict_out = self._fname_dict( - target=this_part, config=config, product=product, - extra_ext=extra_ext_out, - ) + return () + + def task_convolve_parts_for_mosaic( + self, + target=None, + product=None, + config=None, + in_tag='pbcorr_round', + out_tag='linmos_commonres', + extra_ext_in='', + extra_ext_out='', + check_files=True, + ): + """ + For one target, config, product combination that is a linear + mosaic, convolve all of the parts of the mosaic to share a + common angular resolution, appropriate for gridding together + into a single image. + """ + + # Generate file names + + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + + mosaic_parts = self._kh.get_parts_for_linmos(target) + + infile_list = [] + outfile_list = [] + + for this_part in mosaic_parts: - for this_tag_in in in_tags: + this_part_dict_in = self._fname_dict( + target=this_part, config=config, product=product, + extra_ext=extra_ext_in, + ) - this_tag_out = out_tag_dict[this_tag_in] + this_part_dict_out = self._fname_dict( + target=this_part, config=config, product=product, + extra_ext=extra_ext_out, + ) - infile = indir+this_part_dict_in[this_tag_in] + infile = indir + this_part_dict_in[in_tag] infile_exists = os.path.isdir(infile) - outfile = outdir+this_part_dict_out[this_tag_out] + outfile = outdir + this_part_dict_out[out_tag] if infile_exists: infile_list.append(infile) @@ -1422,669 +1307,774 @@ def task_align_for_mosaic( else: logger.warning("Missing file " + infile) - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Aligning for mosaic for:") - 
logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using cmr.common_grid_for_mosaic.") - logger.info("Aligning "+target) - logger.info("Convolving original files "+str(infile_list)) - logger.info("Convolving to convolved output "+str(outfile_list)) - - # TBD implement overrides - - ra_ctr = None - dec_ctr = None - delta_ra = None - delta_dec = None - - if (not self._dry_run) and casa_enabled: - cmr.common_grid_for_mosaic( - infile_list = infile_list, - outfile_list = outfile_list, - ra_ctr = ra_ctr, - dec_ctr = dec_ctr, - delta_ra = delta_ra, - delta_dec = delta_dec, - allow_big_image = False, - too_big_pix=1e4, - asvelocity=True, - interpolation='cubic', - axes=[-1], - overwrite=True, + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Convolving for mosaic for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + logger.info("Using cmr.common_res_for_mosaic.") + logger.info("Convolving " + target) + logger.info("Convolving original files " + str(infile_list)) + logger.info("Convolving to convolved output " + str(outfile_list)) + + # Allow overrides for the pixel padding (the + # number of pixels added to the greatest + # common beam for calculating the target + # resolution) and the target resolution. + + pixel_padding = 2.0 + target_res = None + + # TBD - check override dict for target + # resolution and (maybe?) pixel padding. 
+ + if not self._dry_run: + cmr.common_res_for_mosaic( + infile_list=infile_list, + outfile_list=outfile_list, + do_convolve=True, + target_res=target_res, + pixel_padding=pixel_padding, + overwrite=True, ) - return() - - def task_linear_mosaic( - self, - target = None, - product = None, - config = None, - image_tag = 'linmos_aligned', # 'sd_aligned' - weight_tag = 'weight_aligned', # 'sd_weight_aligned' - out_tag = 'pbcorr_round', # 'prepped_sd' - extra_ext_in = '', - extra_ext_out = '', - check_files = True, + return () + + def task_align_for_mosaic( + self, + target=None, + product=None, + config=None, + in_tags=['linmos_commonres', 'weight', 'prepped_sd', 'sd_weight'], + out_tags=['linmos_aligned', 'weight_aligned', 'sd_aligned', 'sd_weight_aligned'], + extra_ext_in='', + extra_ext_out='', + check_files=True, ): - """ - For one target, config, product combination that is a linear - mosaic and has already been aligned and convolved, execute the - linear mosaic. Needs to be run separately for single dish and - interferometer data. - """ + """ + For one target, config, product combination that is a linear + mosaic, align all parts of the mosaic to a common astrometric + grid for combination into a single image. 
+ """ - # Set input and output directories and define output file + # Map the input and output tags to one another in a dictionary - indir = self._kh.get_postprocess_dir_for_target(target) - outdir = self._kh.get_postprocess_dir_for_target(target) + if (type(in_tags) != type([])) or type(out_tags) != type([]): + logger.error("Input and output tag lists must be lists.") + return (None) - fname_dict_out = self._fname_dict( - target=target, config=config, product=product, - extra_ext=extra_ext_out) + if len(in_tags) != len(out_tags): + logger.error("Mismatch in input and output list tag list.") + return (None) - outfile = fname_dict_out[out_tag] + out_tag_dict = {} + for ii in range(len(in_tags)): + out_tag_dict[in_tags[ii]] = out_tags[ii] - mosaic_parts = self._kh.get_parts_for_linmos(target) + # Generate file names - infile_list = [] - weightfile_list = [] + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) + + mosaic_parts = self._kh.get_parts_for_linmos(target) - # Get the input and weight files for individual parts. 
+ infile_list = [] + outfile_list = [] - for this_part in mosaic_parts: + for this_part in mosaic_parts: - this_part_dict_in = self._fname_dict( - target=this_part, config=config, product=product, - extra_ext=extra_ext_in, + this_part_dict_in = self._fname_dict( + target=this_part, config=config, product=product, + extra_ext=extra_ext_in, ) - # Only include these files if both imaging and weights exist - infile = indir+this_part_dict_in[image_tag] - weightfile = indir+this_part_dict_in[weight_tag] + this_part_dict_out = self._fname_dict( + target=this_part, config=config, product=product, + extra_ext=extra_ext_out, + ) - infile_exists = os.path.isdir(infile) - weightfile_exists = os.path.isdir(weightfile) + for this_tag_in in in_tags: - if infile_exists and weightfile_exists: - infile_list.append(infile) - weightfile_list.append(weightfile) - else: - if not infile_exists: - if self.raise_exception_mosaic_part_missing: - raise FileNotFoundError("Missing file " + infile) - else: - logger.warning("Missing file " + infile) - if not weightfile_exists: - if self.raise_exception_mosaic_part_missing: - raise FileNotFoundError("Missing file " + weightfile) + this_tag_out = out_tag_dict[this_tag_in] + + infile = indir + this_part_dict_in[this_tag_in] + infile_exists = os.path.isdir(infile) + + outfile = outdir + this_part_dict_out[this_tag_out] + + if infile_exists: + infile_list.append(infile) + outfile_list.append(outfile) else: - logger.warning("Missing file "+weightfile) - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Executing linear mosaic for:") - logger.info(str(target)+" , "+str(product)+" , "+str(config)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - logger.info("Using cmr.mosaic_aligned_data.") - logger.info("Creating "+outfile) - logger.info("Mosaicking original files "+str(infile_list)) - logger.info("Weighting by "+str(weightfile_list)) - - if not self._dry_run: - 
cmr.mosaic_aligned_data( - infile_list = infile_list, - weightfile_list = weightfile_list, - outfile = outdir+outfile, - overwrite=True) - - return() - -#endregion - -#region Recipes execute a set of linked tasks for one data set. - - def recipe_prep_one_target( - self, - target = None, - product = None, - config = None, - check_files = True, - imaging_method = 'tclean', - trim_coarse_beam_edge_channels = False, + if self.raise_exception_mosaic_part_missing: + raise FileNotFoundError("Missing file " + infile) + else: + logger.warning("Missing file " + infile) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Aligning for mosaic for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + logger.info("Using cmr.common_grid_for_mosaic.") + logger.info("Aligning " + target) + logger.info("Convolving original files " + str(infile_list)) + logger.info("Convolving to convolved output " + str(outfile_list)) + + # TBD implement overrides + + ra_ctr = None + dec_ctr = None + delta_ra = None + delta_dec = None + + if not self._dry_run: + cmr.common_grid_for_mosaic( + infile_list=infile_list, + outfile_list=outfile_list, + ra_ctr=ra_ctr, + dec_ctr=dec_ctr, + delta_ra=delta_ra, + delta_dec=delta_dec, + allow_big_image=False, + too_big_pix=1e4, + asvelocity=True, + interpolation='cubic', + axes=[-1], + overwrite=True, + ) + + return () + + def task_linear_mosaic( + self, + target=None, + product=None, + config=None, + image_tag='linmos_aligned', # 'sd_aligned' + weight_tag='weight_aligned', # 'sd_weight_aligned' + out_tag='pbcorr_round', # 'prepped_sd' + extra_ext_in='', + extra_ext_out='', + check_files=True, ): - """ - Recipe that takes data from imaging through all steps that - come before feathering and/or mosaicking. 
This means copying - the data, primary beam correction, convolution to a round - beam, importing and aligning the single dish data, and making - weight files for targets that are part of linear mosaicks. - - The recipe assumes a lot of file name conventions so just - needs the target/product/config defined. - """ + """ + For one target, config, product combination that is a linear + mosaic and has already been aligned and convolved, execute the + linear mosaic. Needs to be run separately for single dish and + interferometer data. + """ - # Work out file names and note whether the target is part of a - # mosaic, has single dish data, etc. + # Set input and output directories and define output file - fname_dict = self._fname_dict( - target=target, product=product, config=config, imaging_method=imaging_method) + indir = self._kh.get_postprocess_dir_for_target(target) + outdir = self._kh.get_postprocess_dir_for_target(target) - imaging_dir = self._kh.get_imaging_dir_for_target(target) - has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) - has_singledish = self._kh.has_singledish(target=target, product=product) - is_part_of_mosaic = self._kh.is_target_in_mosaic(target) + fname_dict_out = self._fname_dict( + target=target, config=config, product=product, + extra_ext=extra_ext_out) - if not has_imaging: - logger.warning("No imaging for "+fname_dict['orig']+". Returning.") - return() + outfile = fname_dict_out[out_tag] - # Call tasks + mosaic_parts = self._kh.get_parts_for_linmos(target) - self.task_stage_interf_data( - target=target, config=config, product=product, - check_files=check_files, - imaging_method=imaging_method, - trim_coarse_beam_edge_channels=trim_coarse_beam_edge_channels, - ) + infile_list = [] + weightfile_list = [] - self.task_pbcorr( - target=target, config=config, product=product, - check_files=check_files, - imaging_method=imaging_method - ) + # Get the input and weight files for individual parts. 
- self.task_round_beam( - target=target, config=config, product=product, - check_files=check_files, - imaging_method=imaging_method - ) + for this_part in mosaic_parts: + + this_part_dict_in = self._fname_dict( + target=this_part, config=config, product=product, + extra_ext=extra_ext_in, + ) + + # Only include these files if both imaging and weights exist + infile = indir + this_part_dict_in[image_tag] + weightfile = indir + this_part_dict_in[weight_tag] + + infile_exists = os.path.isdir(infile) + weightfile_exists = os.path.isdir(weightfile) + + if infile_exists and weightfile_exists: + infile_list.append(infile) + weightfile_list.append(weightfile) + else: + if not infile_exists: + if self.raise_exception_mosaic_part_missing: + raise FileNotFoundError("Missing file " + infile) + else: + logger.warning("Missing file " + infile) + if not weightfile_exists: + if self.raise_exception_mosaic_part_missing: + raise FileNotFoundError("Missing file " + weightfile) + else: + logger.warning("Missing file " + weightfile) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Executing linear mosaic for:") + logger.info(str(target) + " , " + str(product) + " , " + str(config)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + logger.info("Using cmr.mosaic_aligned_data.") + logger.info("Creating " + outfile) + logger.info("Mosaicking original files " + str(infile_list)) + logger.info("Weighting by " + str(weightfile_list)) + + if not self._dry_run: + cmr.mosaic_aligned_data( + infile_list=infile_list, + weightfile_list=weightfile_list, + outfile=outdir + outfile, + overwrite=True) + + return () + + # endregion + + # region Recipes execute a set of linked tasks for one data set. 
+ + def recipe_prep_one_target( + self, + target=None, + product=None, + config=None, + check_files=True, + imaging_method='tclean', + trim_coarse_beam_edge_channels=False, + ): + """ + Recipe that takes data from imaging through all steps that + come before feathering and/or mosaicking. This means copying + the data, primary beam correction, convolution to a round + beam, importing and aligning the single dish data, and making + weight files for targets that are part of linear mosaicks. - self.task_remove_degenerate_axes( - target=target, config=config, product=product, - check_files=check_files, - imaging_method=imaging_method + The recipe assumes a lot of file name conventions so just + needs the target/product/config defined. + """ + + # Work out file names and note whether the target is part of a + # mosaic, has single dish data, etc. + + fname_dict = self._fname_dict( + target=target, product=product, config=config, imaging_method=imaging_method) + + imaging_dir = self._kh.get_imaging_dir_for_target(target) + has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) + has_singledish = self._kh.has_singledish(target=target, product=product) + is_part_of_mosaic = self._kh.is_target_in_mosaic(target) + + if not has_imaging: + logger.warning("No imaging for " + fname_dict['orig'] + ". 
Returning.") + return () + + # Call tasks + + self.task_stage_interf_data( + target=target, config=config, product=product, + check_files=check_files, + imaging_method=imaging_method, + trim_coarse_beam_edge_channels=trim_coarse_beam_edge_channels, ) - if has_singledish and imaging_method not in ['sdintimaging']: - self.task_stage_singledish( + self.task_pbcorr( target=target, config=config, product=product, - check_files=check_files - ) + check_files=check_files, + imaging_method=imaging_method + ) - if is_part_of_mosaic: - self.task_make_interf_weight( + self.task_round_beam( target=target, config=config, product=product, - check_files=check_files, scale_by_noise=True, + check_files=check_files, imaging_method=imaging_method - ) + ) - if is_part_of_mosaic and has_singledish and imaging_method not in ['sdintimaging']: - self.task_make_singledish_weight( + self.task_remove_degenerate_axes( target=target, config=config, product=product, check_files=check_files, + imaging_method=imaging_method + ) + + if has_singledish and imaging_method not in ['sdintimaging']: + self.task_stage_singledish( + target=target, config=config, product=product, + check_files=check_files ) - return() - - def recipe_mosaic_one_target( - self, - target = None, - product = None, - config = None, - imaging_method='tclean', - check_files = True, - extra_ext_in = '', - extra_ext_out = '', - ): - """ - Linearly mosaic a single target, performing the convolution, - alignment, and mosaicking steps. 
- """ + if is_part_of_mosaic: + self.task_make_interf_weight( + target=target, config=config, product=product, + check_files=check_files, scale_by_noise=True, + imaging_method=imaging_method + ) - # Check that the target is a mosaic + if is_part_of_mosaic and has_singledish and imaging_method not in ['sdintimaging']: + self.task_make_singledish_weight( + target=target, config=config, product=product, + check_files=check_files, + ) - is_mosaic = self._kh.is_target_linmos(target) + return () + + def recipe_mosaic_one_target( + self, + target=None, + product=None, + config=None, + imaging_method='tclean', + check_files=True, + extra_ext_in='', + extra_ext_out='', + ): + """ + Linearly mosaic a single target, performing the convolution, + alignment, and mosaicking steps. + """ - if not is_mosaic: - logger.warning("Not a mosaic, returning.") - return() + # Check that the target is a mosaic - mosaic_parts = self._kh.get_parts_for_linmos(target) + is_mosaic = self._kh.is_target_linmos(target) - if imaging_method == 'sdintimaging': - has_imaging = False - for mosaic_part in mosaic_parts: - fname_dict = self._fname_dict( - target=mosaic_part, product=product, config=config, - imaging_method=imaging_method) - imaging_dir = self._kh.get_imaging_dir_for_target(mosaic_part) - has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) - if has_imaging: - break - if not has_imaging: - imaging_method = 'tclean' + if not is_mosaic: + logger.warning("Not a mosaic, returning.") + return () - # Check if the individual parts have single dish data. If they - # do, flip the single dish flag to true. 
+ mosaic_parts = self._kh.get_parts_for_linmos(target) - parts_have_singledish = False + if imaging_method == 'sdintimaging': + has_imaging = False + for mosaic_part in mosaic_parts: + fname_dict = self._fname_dict( + target=mosaic_part, product=product, config=config, + imaging_method=imaging_method) + imaging_dir = self._kh.get_imaging_dir_for_target(mosaic_part) + has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) + if has_imaging: + break + if not has_imaging: + imaging_method = 'tclean' - for this_part in mosaic_parts: + # Check if the individual parts have single dish data. If they + # do, flip the single dish flag to true. - this_part_has_sd = self._kh.has_singledish(target=this_part, product=product) + parts_have_singledish = False - if this_part_has_sd: - parts_have_singledish = True + for this_part in mosaic_parts: - # Check if this is a feather configuration. If so, then flip - # the single dish flag to false. This overrides the presence - # of data - we don't treat the singledish for feathered data. + this_part_has_sd = self._kh.has_singledish(target=this_part, product=product) - if config in self.get_feather_configs() or imaging_method in ['sdintimaging']: + if this_part_has_sd: + parts_have_singledish = True - parts_have_singledish = False + # Check if this is a feather configuration. If so, then flip + # the single dish flag to false. This overrides the presence + # of data - we don't treat the singledish for feathered data. 
- self.task_convolve_parts_for_mosaic( - target = target, - product = product, - config = config, - in_tag = 'pbcorr_round', - out_tag = 'linmos_commonres', - extra_ext_in = extra_ext_in, - extra_ext_out = extra_ext_in, - check_files = check_files, - ) + if config in self.get_feather_configs() or imaging_method in ['sdintimaging']: + parts_have_singledish = False - in_tag_list = ['linmos_commonres', 'weight'] - out_tag_list = ['linmos_aligned', 'weight_aligned'] - - if parts_have_singledish: - in_tag_list.append('prepped_sd') - in_tag_list.append('sd_weight') - out_tag_list.append('sd_aligned') - out_tag_list.append('sd_weight_aligned') - - self.task_align_for_mosaic( - target = target, - product = product, - config = config, - in_tags = in_tag_list, - out_tags = out_tag_list, - extra_ext_in = extra_ext_in, - extra_ext_out = extra_ext_in, - check_files = check_files, + self.task_convolve_parts_for_mosaic( + target=target, + product=product, + config=config, + in_tag='pbcorr_round', + out_tag='linmos_commonres', + extra_ext_in=extra_ext_in, + extra_ext_out=extra_ext_in, + check_files=check_files, ) - self.task_linear_mosaic( - target = target, - product = product, - config = config, - image_tag = 'linmos_aligned', - weight_tag = 'weight_aligned', - out_tag = 'pbcorr_round', - extra_ext_in = extra_ext_in, - extra_ext_out = extra_ext_out, - check_files = check_files, + in_tag_list = ['linmos_commonres', 'weight'] + out_tag_list = ['linmos_aligned', 'weight_aligned'] + + if parts_have_singledish: + in_tag_list.append('prepped_sd') + in_tag_list.append('sd_weight') + out_tag_list.append('sd_aligned') + out_tag_list.append('sd_weight_aligned') + + self.task_align_for_mosaic( + target=target, + product=product, + config=config, + in_tags=in_tag_list, + out_tags=out_tag_list, + extra_ext_in=extra_ext_in, + extra_ext_out=extra_ext_in, + check_files=check_files, ) - if parts_have_singledish: self.task_linear_mosaic( - target = target, - product = product, - config = config, 
- image_tag = 'sd_aligned', - weight_tag = 'sd_weight_aligned', - out_tag = 'prepped_sd', - extra_ext_in = extra_ext_in, - extra_ext_out = extra_ext_out, - check_files = check_files, + target=target, + product=product, + config=config, + image_tag='linmos_aligned', + weight_tag='weight_aligned', + out_tag='pbcorr_round', + extra_ext_in=extra_ext_in, + extra_ext_out=extra_ext_out, + check_files=check_files, + ) + + if parts_have_singledish: + self.task_linear_mosaic( + target=target, + product=product, + config=config, + image_tag='sd_aligned', + weight_tag='sd_weight_aligned', + out_tag='prepped_sd', + extra_ext_in=extra_ext_in, + extra_ext_out=extra_ext_out, + check_files=check_files, ) - return() + return () - def recipe_cleanup_one_target( - self, - target = None, - product = None, - config = None, - check_files = True, - ext_ext = '', + def recipe_cleanup_one_target( + self, + target=None, + product=None, + config=None, + check_files=True, + ext_ext='', ): - """ - Recipe that cleans up the output for one target, converting to - Kelvin, compressing and trimming the cube and then exporting - as a FITS file. - """ + """ + Recipe that cleans up the output for one target, converting to + Kelvin, compressing and trimming the cube and then exporting + as a FITS file. 
+ """ - self.task_compress( - target=target, config=config, product=product, - check_files=check_files, do_pb_too=True, - do_trimrind=True, - extra_ext_in=ext_ext, extra_ext_out=ext_ext + self.task_compress( + target=target, config=config, product=product, + check_files=check_files, do_pb_too=True, + do_trimrind=True, + extra_ext_in=ext_ext, extra_ext_out=ext_ext ) - self.task_convert_units( - target=target, config=config, product=product, - check_files=check_files, - extra_ext_in=ext_ext, extra_ext_out=ext_ext + self.task_convert_units( + target=target, config=config, product=product, + check_files=check_files, + extra_ext_in=ext_ext, extra_ext_out=ext_ext ) - self.task_export_to_fits( - target=target, config=config, product=product, - check_files=check_files, do_pb_too=True, - extra_ext_in=ext_ext, extra_ext_out=ext_ext + self.task_export_to_fits( + target=target, config=config, product=product, + check_files=check_files, do_pb_too=True, + extra_ext_in=ext_ext, extra_ext_out=ext_ext ) - return() + return () - def recipe_convolve_to_scale( - self, - target = None, - product = None, - config = None, - ext_ext = '', - export_to_fits = True, - check_files = True, + def recipe_convolve_to_scale( + self, + target=None, + product=None, + config=None, + ext_ext='', + export_to_fits=True, + check_files=True, ): - """ - Convolve a target, product, config combination to a succession - of angulars scale using the task that convolves to a round - beam. - """ + """ + Convolve a target, product, config combination to a succession + of angulars scale using the task that convolves to a round + beam. 
+ """ - res_list = self._kh.get_res_for_config(config) - if res_list is None: - logger.error("No target resolutions found for config "+config) - return() + res_list = self._kh.get_res_for_config(config) + if res_list is None: + logger.error("No target resolutions found for config " + config) + return () - for this_res in res_list: - check_target_is_part, target_name = self._kh.is_target_in_mosaic(target, return_target_name=True) + for this_res in res_list: + check_target_is_part, target_name = self._kh.is_target_in_mosaic(target, return_target_name=True) - #res_tag = self._kh.get_tag_for_res(this_res) - res_tag = utilsResolutions.get_tag_for_res(this_res) - res_arcsec = utilsResolutions.get_angular_resolution_for_res(this_res, distance = self._kh.get_distance_for_target(target_name)) + # res_tag = self._kh.get_tag_for_res(this_res) + res_tag = utilsResolutions.get_tag_for_res(this_res) + res_arcsec = utilsResolutions.get_angular_resolution_for_res(this_res, + distance=self._kh.get_distance_for_target( + target_name)) - # Check if the requested beam is smaller than the current one + # Check if the requested beam is smaller than the current one - self.task_round_beam( - target=target, config=config, product=product, - in_tag = 'pbcorr_trimmed_k', out_tag = 'pbcorr_trimmed_k', - extra_ext_in=ext_ext, extra_ext_out=ext_ext+'_res'+res_tag, - force_beam_as=res_arcsec, - check_files=check_files + self.task_round_beam( + target=target, config=config, product=product, + in_tag='pbcorr_trimmed_k', out_tag='pbcorr_trimmed_k', + extra_ext_in=ext_ext, extra_ext_out=ext_ext + '_res' + res_tag, + force_beam_as=res_arcsec, + check_files=check_files ) - if export_to_fits: - self.task_export_to_fits( - target=target, config=config, product=product, - check_files=check_files, do_pb_too=True, - extra_ext_in=ext_ext+'_res'+res_tag, extra_ext_out=ext_ext+'_res'+res_tag, + if export_to_fits: + self.task_export_to_fits( + target=target, config=config, product=product, + 
check_files=check_files, do_pb_too=True, + extra_ext_in=ext_ext + '_res' + res_tag, extra_ext_out=ext_ext + '_res' + res_tag, ) -#endregion - -#region Loops - - def loop_postprocess( - self, - imaging_method='tclean', - do_all=False, - do_prep=False, - do_feather=False, - do_mosaic=False, - do_cleanup=False, - do_summarize=False, - trim_coarse_beam_edge_channels=False, - feather_apod=False, - feather_noapod=False, - feather_before_mosaic=False, - make_directories=True, + # endregion + + # region Loops + + def loop_postprocess( + self, + imaging_method='tclean', + do_all=False, + do_prep=False, + do_feather=False, + do_mosaic=False, + do_cleanup=False, + do_summarize=False, + trim_coarse_beam_edge_channels=False, + feather_apod=False, + feather_noapod=False, + feather_before_mosaic=False, + make_directories=True, ): - """ - Loops over the full set of targets, products, and - configurations to run the postprocessing. Toggle the parts of - the loop using the do_XXX booleans. Other choices affect the - algorithms used. - """ - - if do_all: - do_prep = True - do_feather=True - do_mosaic=True - do_cleanup=True - do_summarize=True - - if feather_apod is False and feather_noapod is False: - logger.info("Defaulting to no apodization.") - feather_noapod = True + """ + Loops over the full set of targets, products, and + configurations to run the postprocessing. Toggle the parts of + the loop using the do_XXX booleans. Other choices affect the + algorithms used. 
+ """ + + if do_all: + do_prep = True + do_feather = True + do_mosaic = True + do_cleanup = True + do_summarize = True + + if feather_apod is False and feather_noapod is False: + logger.info("Defaulting to no apodization.") + feather_noapod = True + + if len(self.get_targets()) == 0: + logger.error("Need a target list.") + return (None) + + if len(self.get_all_products()) == 0: + logger.error("Need a products list.") + return (None) + + if make_directories: + self._kh.make_missing_directories(postprocess=True) + + # Prepare the interferometer data that has imaging for further + # postprocessing. Includes staging the single dish data, + # making weights, etc. These are in the recipe_prep_one_target + + if do_prep: + + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True): + + has_imaging = False + + if imaging_method == 'sdintimaging': + fname_dict = self._fname_dict( + target=this_target, product=this_product, config=this_config, + imaging_method=imaging_method) + imaging_dir = self._kh.get_imaging_dir_for_target(this_target) + has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) + if has_imaging: + imaging_method_prep = 'sdintimaging' + + if not has_imaging: + fname_dict = self._fname_dict( + target=this_target, product=this_product, config=this_config) + imaging_dir = self._kh.get_imaging_dir_for_target(this_target) + has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) + if has_imaging: + imaging_method_prep = 'tclean' + + if not has_imaging: + logger.debug("Skipping " + this_target + " because it lacks imaging.") + logger.debug(imaging_dir + fname_dict['orig']) + continue - if len(self.get_targets()) == 0: - logger.error("Need a target list.") - return(None) + self.recipe_prep_one_target( + target=this_target, product=this_product, config=this_config, + check_files=True, + trim_coarse_beam_edge_channels=trim_coarse_beam_edge_channels, + imaging_method=imaging_method_prep) - if 
len(self.get_all_products()) == 0: - logger.error("Need a products list.") - return(None) + # Feather the interferometer configuration data that has + # single dish imaging. We'll return to feather mosaicked + # interferometer and single dish data in the next steps. - if make_directories: - self._kh.make_missing_directories(postprocess=True) + if do_feather: - # Prepare the interferometer data that has imaging for further - # postprocessing. Includes staging the single dish data, - # making weights, etc. These are in the recipe_prep_one_target + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True, just_interf=True): - if do_prep: + is_mosaic = self._kh.is_target_linmos(this_target) + if is_mosaic: + continue - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True): + if imaging_method == 'sdintimaging': + fname_dict = self._fname_dict( + target=this_target, product=this_product, config=this_config, + imaging_method=imaging_method) + imaging_dir = self._kh.get_imaging_dir_for_target(this_target) + using_sdint = os.path.isdir(imaging_dir + fname_dict['orig']) + if using_sdint: + logger.debug('Skipping feathering for %s, %s, %s because using sdintimaging' % + (this_target, this_product, this_config)) + continue - has_imaging = False - - if imaging_method == 'sdintimaging': - fname_dict = self._fname_dict( - target=this_target, product=this_product, config=this_config, - imaging_method=imaging_method) - imaging_dir = self._kh.get_imaging_dir_for_target(this_target) - has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) - if has_imaging: - imaging_method_prep = 'sdintimaging' - - if not has_imaging: fname_dict = self._fname_dict( target=this_target, product=this_product, config=this_config) + imaging_dir = self._kh.get_imaging_dir_for_target(this_target) has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) - if has_imaging: - 
imaging_method_prep = 'tclean' - - if not has_imaging: - logger.debug("Skipping "+this_target+" because it lacks imaging.") - logger.debug(imaging_dir+fname_dict['orig']) - continue + has_singledish = self._kh.has_singledish(target=this_target, product=this_product) - self.recipe_prep_one_target( - target = this_target, product = this_product, config = this_config, - check_files = True, - trim_coarse_beam_edge_channels = trim_coarse_beam_edge_channels, - imaging_method = imaging_method_prep) - - # Feather the interferometer configuration data that has - # single dish imaging. We'll return to feather mosaicked - # intereferometer and single dish data in the next steps. - - if do_feather: - - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True,just_interf=True): - - is_mosaic = self._kh.is_target_linmos(this_target) - if is_mosaic: - continue - - if imaging_method == 'sdintimaging': - fname_dict = self._fname_dict( - target=this_target, product=this_product, config=this_config, - imaging_method=imaging_method) - imaging_dir = self._kh.get_imaging_dir_for_target(this_target) - using_sdint = os.path.isdir(imaging_dir + fname_dict['orig']) - if using_sdint: - logger.debug('Skipping feathering for %s, %s, %s because using sdintimaging' % - (this_target, this_product, this_config)) + is_part_of_mosaic = self._kh.is_target_in_mosaic(this_target) + if is_part_of_mosaic and not feather_before_mosaic: + logger.debug("Skipping " + this_target + " because feather_before_mosaic is False.") continue - fname_dict = self._fname_dict( - target=this_target, product=this_product, config=this_config) - - imaging_dir = self._kh.get_imaging_dir_for_target(this_target) - has_imaging = os.path.isdir(imaging_dir + fname_dict['orig']) - has_singledish = self._kh.has_singledish(target=this_target, product=this_product) - - is_part_of_mosaic = self._kh.is_target_in_mosaic(this_target) - if is_part_of_mosaic and not 
feather_before_mosaic: - logger.debug("Skipping "+this_target+" because feather_before_mosaic is False.") - continue - - if not has_imaging: - logger.debug("Skipping "+this_target+" because it lacks imaging.") - logger.debug(imaging_dir+fname_dict['orig']) - continue + if not has_imaging: + logger.debug("Skipping " + this_target + " because it lacks imaging.") + logger.debug(imaging_dir + fname_dict['orig']) + continue - if not has_singledish: - logger.debug("Skipping "+this_target+" because it lacks single dish.") - continue + if not has_singledish: + logger.debug("Skipping " + this_target + " because it lacks single dish.") + continue - if feather_apod: - self.task_feather( - target = this_target, product = this_product, config = this_config, - apodize=True, apod_ext='pb',extra_ext_out='_apod',check_files=True, - copy_weights=True, + if feather_apod: + self.task_feather( + target=this_target, product=this_product, config=this_config, + apodize=True, apod_ext='pb', extra_ext_out='_apod', check_files=True, + copy_weights=True, ) - if feather_noapod: - self.task_feather( - target = this_target, product = this_product, config = this_config, - apodize=False, extra_ext_out='',check_files=True, - copy_weights=True, + if feather_noapod: + self.task_feather( + target=this_target, product=this_product, config=this_config, + apodize=False, extra_ext_out='', check_files=True, + copy_weights=True, ) - # Mosaic the interferometer, single dish, and feathered data. + # Mosaic the interferometer, single dish, and feathered data. 
- if do_mosaic: + if do_mosaic: - # Loop over interferometer configurations + # Loop over interferometer configurations - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True,just_interf=True): + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True, just_interf=True): - is_mosaic = self._kh.is_target_linmos(this_target) - if not is_mosaic: - continue + is_mosaic = self._kh.is_target_linmos(this_target) + if not is_mosaic: + continue - if feather_before_mosaic: - logger.debug("Skipping "+this_target+" because feather_before_mosaic is True.") - continue + if feather_before_mosaic: + logger.debug("Skipping " + this_target + " because feather_before_mosaic is True.") + continue + + # Mosaic the interferometer data and the + # single dish data (need to verify if parts + # have single dish, enforce the same + # astrometric grid). - # Mosaic the interferometer data and the - # single dish data (need to verify if parts - # have single dish, enforce the same - # astrometric grid). 
- - self.recipe_mosaic_one_target( - target = this_target, product = this_product, config = this_config, - check_files = True, - imaging_method=imaging_method, - extra_ext_in = '', - extra_ext_out = '', + self.recipe_mosaic_one_target( + target=this_target, product=this_product, config=this_config, + check_files=True, + imaging_method=imaging_method, + extra_ext_in='', + extra_ext_out='', ) - # Loop over feather configurations + # Loop over feather configurations - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True,just_feather=True): + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True, just_feather=True): - is_mosaic = self._kh.is_target_linmos(this_target) - if not is_mosaic: - continue + is_mosaic = self._kh.is_target_linmos(this_target) + if not is_mosaic: + continue - if feather_apod: - self.recipe_mosaic_one_target( - target = this_target, product = this_product, config = this_config, - check_files = True, - extra_ext_in = '_apod', - extra_ext_out = '', + if feather_apod: + self.recipe_mosaic_one_target( + target=this_target, product=this_product, config=this_config, + check_files=True, + extra_ext_in='_apod', + extra_ext_out='', ) - if feather_noapod: - self.recipe_mosaic_one_target( - target = this_target, product = this_product, config = this_config, - check_files = True, - extra_ext_in = '', - extra_ext_out = '', + if feather_noapod: + self.recipe_mosaic_one_target( + target=this_target, product=this_product, config=this_config, + check_files=True, + extra_ext_in='', + extra_ext_out='', ) - # This round of feathering targets only mosaicked data. All - # other data have been feathered above already. + # This round of feathering targets only mosaicked data. All + # other data have been feathered above already. - if do_feather and feather_before_mosaic: + if do_feather and feather_before_mosaic: - # N.B. 
if using sdintimaging this will just crash out since it hasn't staged any singledish. This is - # intended! + # N.B. if using sdintimaging this will just crash out since it hasn't staged any singledish. This is + # intended! - for this_target, this_product, this_config in \ - self.looper(do_targets=True,do_products=True,do_configs=True,just_interf=True): + for this_target, this_product, this_config in \ + self.looper(do_targets=True, do_products=True, do_configs=True, just_interf=True): - # Skip if we're not a mosaic - is_mosaic = self._kh.is_target_linmos(this_target) - if not is_mosaic: - continue + # Skip if we're not a mosaic + is_mosaic = self._kh.is_target_linmos(this_target) + if not is_mosaic: + continue - if feather_apod: - self.task_feather( - target = this_target, product = this_product, config = this_config, - apodize=True, apod_ext='pb',extra_ext_out='_apod',check_files=True, + if feather_apod: + self.task_feather( + target=this_target, product=this_product, config=this_config, + apodize=True, apod_ext='pb', extra_ext_out='_apod', check_files=True, ) - if feather_noapod: - self.task_feather( - target = this_target, product = this_product, config = this_config, - apodize=False, extra_ext_out='',check_files=True, + if feather_noapod: + self.task_feather( + target=this_target, product=this_product, config=this_config, + apodize=False, extra_ext_out='', check_files=True, ) - # Trim and downsample the data, convert to Kelvin, etc. - - if do_cleanup: - - for this_target, this_product, this_config in \ - self.looper(do_targets=True, - do_products=True, - do_configs=True): + # Trim and downsample the data, convert to Kelvin, etc. 
- # At this point, if using sdintimaging rename all the interf config files to their associated feathered - # config files + if do_cleanup: - if imaging_method == 'sdintimaging': + for this_target, this_product, this_config in \ + self.looper(do_targets=True, + do_products=True, + do_configs=True): - self.task_rename_sdintimaging(target=this_target, product=this_product, config=this_config, - imaging_method=imaging_method) + # At this point, if using sdintimaging rename all the interf config files to their associated feathered + # config files - self.recipe_cleanup_one_target( - target = this_target, - product = this_product, - config = this_config, - check_files = True) + if imaging_method == 'sdintimaging': + self.task_rename_sdintimaging(target=this_target, product=this_product, config=this_config, + imaging_method=imaging_method) - # Build reports summarizing the properties of the final - # postprocessed data. + self.recipe_cleanup_one_target( + target=this_target, + product=this_product, + config=this_config, + check_files=True) - if do_summarize: + # Build reports summarizing the properties of the final + # postprocessed data. - pass + if do_summarize: + pass -#endregion + # endregion diff --git a/phangsPipeline/handlerRelease.py b/phangsPipeline/handlerRelease.py index 54e4400f..663f0014 100644 --- a/phangsPipeline/handlerRelease.py +++ b/phangsPipeline/handlerRelease.py @@ -17,30 +17,17 @@ this_hr.set_targets(only=['ngc4321']) """ -import os, sys, re, shutil -import glob import logging +import os +import shutil -import numpy as np +from . import handlerTemplate +from . import utilsFilenames +from . import utilsResolutions logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed -casa_enabled = is_casa_installed() - - -if casa_enabled: - logger.debug('casa_enabled = True') -else: - logger.debug('casa_enabled = False') - -from . 
import utilsResolutions -from . import utilsFilenames -from . import utilsLines -from . import handlerTemplate - class ReleaseHandler(handlerTemplate.HandlerTemplate): """ diff --git a/phangsPipeline/handlerSingleDish.py b/phangsPipeline/handlerSingleDish.py index bcaf85a1..8799ec7b 100644 --- a/phangsPipeline/handlerSingleDish.py +++ b/phangsPipeline/handlerSingleDish.py @@ -9,406 +9,399 @@ calls to CASA from this class. """ -import os, sys, re, shutil import glob -import numpy as np - import logging -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - +import os +import shutil # Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed +from .check_imports import is_casa_installed casa_enabled = is_casa_installed() +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + if casa_enabled: logger.debug('casa_enabled = True') - from . import casaLegacySingleDishRoutines as csdr +else: + logger.debug('casa_enabled = False') + +if casa_enabled: + from . import casaLegacySingleDishRoutines as csdr from . import casaSingleDishALMAWrapper as sdalma + from . import handlerTemplate + from . import utilsLines -else: - logger.debug('casa_enabled = False') + class SingleDishHandler(handlerTemplate.HandlerTemplate): + """ + Class to handle single dish data. + """ -from . import handlerTemplate -from . import utilsFilenames -from . 
import utilsLines + def __init__( + self, + key_handler = None, + dry_run = False, + use_legacy_pipeline=False, + ): + handlerTemplate.HandlerTemplate.__init__(self, key_handler = key_handler, dry_run = dry_run) + + self.use_legacy_pipeline = use_legacy_pipeline + + #region File name routines + + ########################################### + # Defined file names for various products # + ########################################### + + def _fname_dict( + self, + target=None, + product=None, + extra_ext='', + ): + """ + Make the file name dictionary for all files used + in the process. + """ + + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Error checking + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + + if target is None: + logger.error("Need a target.") + return + if product is None: + logger.error("Need a product.") + return -class SingleDishHandler(handlerTemplate.HandlerTemplate): - """ - Class to handle single dish data. - """ + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Initialize + # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - def __init__( - self, - key_handler = None, - dry_run = False, - use_legacy_pipeline=False, - ): - # Can't use super and keep python2/3 agnostic - handlerTemplate.HandlerTemplate.__init__(self, key_handler = key_handler, dry_run = dry_run) + fname_dict = {} - self.use_legacy_pipeline = use_legacy_pipeline + # single dish file -#region File name routines + has_sd = self._kh.has_singledish(target=target, product=product) + tag = 'sd_file' + fname_dict[tag] = '' + if has_sd: + sd_file = self._kh.get_sd_filename(target = target, product = product, nocheck = True) + if sd_file is not None: + fname_dict[tag] = sd_file - ########################################### - # Defined file names for various products # - ########################################### + fname_dict['source'] = [] - def _fname_dict( - self, - target=None, - product=None, - extra_ext='', - ): - """ - Make the file name dictionary for all files used - in the 
process. - """ + tag = 'sd_raw_data_list' + fname_dict[tag] = [] + if has_sd: + for this_target, this_project, this_arraytag, this_obsnum in self._kh.loop_over_input_ms(target=target, config='tp'): + sd_file = self._kh.get_file_for_input_ms(target=this_target, + project=this_project, + array_tag=this_arraytag, + obsnum=this_obsnum) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Error checking - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - if target is None: - logger.error("Need a target.") - return + source = self._kh.get_field_for_input_ms(target=this_target, + project=this_project, + array_tag=this_arraytag, + obsnum=this_obsnum, + ) - if product is None: - logger.error("Need a product.") - return + fname_dict[tag].append(sd_file) + fname_dict['source'].append(source) - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% - # Initialize - # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% + # Return - fname_dict = {} + return(fname_dict) - # single dish file + #endregion - has_sd = self._kh.has_singledish(target=target, product=product) - tag = 'sd_file' - fname_dict[tag] = '' - if has_sd: - sd_file = self._kh.get_sd_filename(target = target, product = product, nocheck = True) - if sd_file is not None: - fname_dict[tag] = sd_file + #region "Tasks" : Individual steps - fname_dict['source'] = [] + def task_execute_single_dish_pipeline( + self, + target, + product='all', + source='all', + extra_ext_in='', + extra_ext_out='', + line_wing_kms=200.0, + ): + """ + Execute single dish data reduction for one target. 
+ """ - tag = 'sd_raw_data_list' - fname_dict[tag] = [] - if has_sd: - for this_target, this_project, this_arraytag, this_obsnum in self._kh.loop_over_input_ms(target=target, config='tp'): - sd_file = self._kh.get_file_for_input_ms(target=this_target, - project=this_project, - array_tag=this_arraytag, - obsnum=this_obsnum) + if product == 'all': + product_list = self.get_line_products() + else: + product_list = [product] + fname_dict = self._fname_dict( + target=target, + product=product_list[0], + ) + + if fname_dict['sd_file'] == '': + logger.info("Target "+target+" product "+product+" has no single dish data in the singledish_key file.") - source = self._kh.get_field_for_input_ms(target=this_target, - project=this_project, - array_tag=this_arraytag, - obsnum=this_obsnum, - ) + if len(fname_dict['sd_raw_data_list']) > 1: + logger.warning('Warning! Multiple single dish raw data entries are found in the ms_file_key! We will only process the first one! [TODO]') + ## We can only process one single dish raw data for now. Not sure how to combine those. Unless we specify line_product in the ms_file_key? - fname_dict[tag].append(sd_file) - fname_dict['source'].append(source) + input_raw_data = fname_dict['sd_raw_data_list'][0] - # Return + # Legacy pipeline doesn't handle multiple line products. 
+ if self.use_legacy_pipeline: + logger.warning('Using legacy single dish pipeline') + logger.warning(f'Only the first line product will be processed: {product_list[0]}') - return(fname_dict) + product_list = [product_list[0]] -#endregion + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Executing single dish pipeline") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info(" target: "+str(target)) + logger.info(" product: "+str(product_list)) + logger.info(" raw data: "+str(input_raw_data)) + # logger.info(" output file: "+str(output_file)) -#region "Tasks" : Individual steps + # if os.path.isfile(output_file): + # logger.info('Output file already exists: '+str(output_file)+'. Will not re-process it.') + # return - def task_execute_single_dish_pipeline( - self, - target, - product='all', - source='all', - extra_ext_in='', - extra_ext_out='', - line_wing_kms=200.0, - ): - """ - Execute single dish data reduction for one target. - """ + for product in product_list: + if product not in self._kh.get_line_products(): + logger.error('Error! Product '+str(product)+' is not defined in the config_definitions key?!') + return - if product == 'all': - product_list = self.get_line_products() - else: - product_list = [product] + parameters = self._kh.get_params_for_singledish(singledish_config='tp') - fname_dict = self._fname_dict( - target=target, - product=product_list[0], - ) + product_dict = {} + for product in product_list: - if fname_dict['sd_file'] == '': - logger.info("Target "+target+" product "+product+" has no single dish data in the singledish_key file.") + fname_dict = self._fname_dict( + target=target, + product=product, + ) + output_file = fname_dict['sd_file'] - if len(fname_dict['sd_raw_data_list']) > 1: - logger.warning('Warning! Multiple single dish raw data entries are found in the ms_file_key! We will only process the first one! [TODO]') - ## We can only process one single dish raw data for now. Not sure how to combine those. 
Unless we specify line_product in the ms_file_key? + product_dict[product] = self._kh._config_dict['line_product'][product] - input_raw_data = fname_dict['sd_raw_data_list'][0] + this_line = self._kh.get_line_tag_for_line_product(product) + vsys, vwidth = self._kh.get_system_velocity_and_velocity_width_for_target(target, check_parent=False) + max_chanwidth_kms = self._kh.get_channel_width_for_line_product(product) - # Legacy pipeline doesn't handle multiple line products. - if self.use_legacy_pipeline: - logger.warning('Using legacy single dish pipeline') - logger.warning(f'Only the first line product will be processed: {product_list[0]}') + #joint_imaging_dirs = self._kh.get_joint_imaging_dirs_for_singledish_config() + #joint_imaging_suffix = self._kh.get_joint_imaging_suffix_for_singledish_config() - product_list = [product_list[0]] + line_wing = line_wing_kms # km/s + vlow1 = vsys - vwidth/2.0 + vhigh1 = vsys + vwidth/2.0 + vlow2 = vsys - vwidth/2.0 - line_wing + vhigh2 = vsys + vwidth/2.0 + line_wing - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Executing single dish pipeline") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info(" target: "+str(target)) - logger.info(" product: "+str(product_list)) - logger.info(" raw data: "+str(input_raw_data)) - # logger.info(" output file: "+str(output_file)) + line_name, freq_rest_GHz = utilsLines.get_line_name_and_frequency(this_line) + freq_rest_MHz = freq_rest_GHz * 1e3 # MHz - # if os.path.isfile(output_file): - # logger.info('Output file already exists: '+str(output_file)+'. Will not re-process it.') - # return + rastring, decstring = self._kh.get_phasecenter_for_target(target=target) + phase_center = 'J2000 '+rastring+' '+decstring - for product in product_list: - if product not in self._kh.get_line_products(): - logger.error('Error! 
Product '+str(product)+' is not defined in the config_definitions key?!') - return + name_line = line_name.upper() + '_%.0fkmsres'%(max_chanwidth_kms) - parameters = self._kh.get_params_for_singledish(singledish_config='tp') + # Add to product dict: + product_dict[product]['line_name'] = line_name + product_dict[product]['freq_rest_MHz'] = freq_rest_MHz + product_dict[product]['vel_line_mask'] = [vlow2, vhigh2] + product_dict[product]['vel_cube'] = [vlow1, vhigh1] + product_dict[product]['name_line'] = name_line + product_dict[product]['phase_center'] = phase_center - product_dict = {} - for product in product_list: + product_dict[product]['max_chanwidth_kms'] = max_chanwidth_kms + product_dict[product]['vsys'] = vsys + product_dict[product]['vwidth'] = vwidth - fname_dict = self._fname_dict( - target=target, - product=product, - ) - output_file = fname_dict['sd_file'] - - product_dict[product] = self._kh._config_dict['line_product'][product] - - this_line = self._kh.get_line_tag_for_line_product(product) - vsys, vwidth = self._kh.get_system_velocity_and_velocity_width_for_target(target, check_parent=False) - max_chanwidth_kms = self._kh.get_channel_width_for_line_product(product) - - #joint_imaging_dirs = self._kh.get_joint_imaging_dirs_for_singledish_config() - #joint_imaging_suffix = self._kh.get_joint_imaging_suffix_for_singledish_config() - - line_wing = line_wing_kms # km/s - vlow1 = vsys - vwidth/2.0 - vhigh1 = vsys + vwidth/2.0 - vlow2 = vsys - vwidth/2.0 - line_wing - vhigh2 = vsys + vwidth/2.0 + line_wing - - line_name, freq_rest_GHz = utilsLines.get_line_name_and_frequency(this_line) - freq_rest_MHz = freq_rest_GHz * 1e3 # MHz - - rastring, decstring = self._kh.get_phasecenter_for_target(target=target) - phase_center = 'J2000 '+rastring+' '+decstring - - name_line = line_name.upper() + '_%.0fkmsres'%(max_chanwidth_kms) - - # Add to product dict: - product_dict[product]['line_name'] = line_name - product_dict[product]['freq_rest_MHz'] = freq_rest_MHz - 
product_dict[product]['vel_line_mask'] = [vlow2, vhigh2] - product_dict[product]['vel_cube'] = [vlow1, vhigh1] - product_dict[product]['name_line'] = name_line - product_dict[product]['phase_center'] = phase_center - - product_dict[product]['max_chanwidth_kms'] = max_chanwidth_kms - product_dict[product]['vsys'] = vsys - product_dict[product]['vwidth'] = vwidth - - product_dict[product]['output_file'] = output_file - - for key in parameters: - if key not in product_dict[product]: - product_dict[product][key] = parameters[key] - - - # copy raw data over - path_galaxy = self._kh.get_singledish_dir_for_target(target=target, changeto=False) + os.sep + 'processing_singledish_'+target + os.sep - path_galaxy = os.path.abspath(path_galaxy) + os.sep - input_raw_data = os.path.abspath(input_raw_data) + os.sep - if not os.path.isdir(path_galaxy): - os.makedirs(path_galaxy) - for dir_to_copy in ['calibration', 'raw', 'script', 'qa']: - if os.path.isdir(os.path.join(path_galaxy, dir_to_copy)): - input_raw_data_files = glob.glob(os.path.join(input_raw_data, dir_to_copy)) - copied_raw_data_files = glob.glob(os.path.join(path_galaxy, dir_to_copy)) - if len(input_raw_data_files) > len(copied_raw_data_files): - logger.info(" cleaning up: "+str(os.path.join(path_galaxy, dir_to_copy))) - shutil.rmtree(glob.glob(os.path.join(path_galaxy, dir_to_copy))) - if not os.path.isdir(os.path.join(path_galaxy, dir_to_copy)): - logger.info(" copying raw data: "+str(os.path.join(input_raw_data, dir_to_copy))) - logger.info(" to processing dir: "+str(os.path.join(path_galaxy, dir_to_copy))) - shutil.copytree(os.path.join(input_raw_data, dir_to_copy), \ - os.path.join(path_galaxy, dir_to_copy)) - - - if self.use_legacy_pipeline: - logger.info(" Using legacy pipeline") - - product = product_list[0] - - vlow2 = product_dict[product]['vel_cube'][0] - vhigh2 = product_dict[product]['vel_cube'][1] - - vlow1 = product_dict[product]['vel_line_mask'][0] - vhigh1 = product_dict[product]['vel_line_mask'][1] - 
- kwargs = {} - kwargs['path_galaxy'] = path_galaxy # - kwargs['flag_file'] = '' # - kwargs['doplots'] = False # Do non-interactive. additional plots (plots will be saved in "calibration/plots" folder) - kwargs['bl_order'] = 1 # Order for the baseline fitting - kwargs['max_flag_frac'] = 0.9 # Remove antennae with significant amounts of flagged data - kwargs['in_source'] = source # Source name. This comes from the field name in the MS file keys - kwargs['freq_rest'] = product_dict[product]['freq_rest_MHz'] # Rest frequency of requested line in MHz (ex: "freq_rest = 230538" for CO(2-1)) - kwargs['vel_cube'] = vel_cube = '%.3f~%.3f'%(vlow2, vhigh2) # Range in velocity in km/s to extract the line cube. - kwargs['vel_line'] = vel_line = '%.3f~%.3f'%(vlow1, vhigh1) # Range in velocity in km/s to exclude the line emission from the baseline fit. - kwargs['phase_center'] = phase_center # Provide coordinate of phase center, otherwise set to "False" and coordinates will be read from the data - kwargs['source_vel_kms'] = product_dict[product]['vsys'] # Provide velocity of the source, otherwise set to "False" and coordinates will be read from the data - kwargs['vwidth_kms'] = product_dict[product]['vwidth'] # width in velocity and velocity resolution in km/s - kwargs['chan_dv_kms'] = product_dict[product]['max_chanwidth_kms'] # - kwargs['freq_rest_im'] = product_dict[product]['freq_rest_GHz'] # rest frequency in GHz for imaging - kwargs['name_line'] = product_dict[product]['line_name'] # Name of the line, to be used for naming the files -- will not be used anymore - kwargs['output_file'] = product_dict[product]['output_file'] # Output file path - #kwargs['joint_imaging_dirs'] = joint_imaging_dirs # Do a joint imaging by including *.cal.jy in joint_imaging_dir - #kwargs['joint_imaging_suffix'] = joint_imaging_suffix # Suffix after name_line in the output file name. 
- kwargs['do_step'] = [] - - # see if there is anything defined in the config_definitions key - parameters = self._kh.get_params_for_singledish(singledish_config='tp') + product_dict[product]['output_file'] = output_file - if parameters is not None: for key in parameters: - kwargs[key] = parameters[key] + if key not in product_dict[product]: + product_dict[product][key] = parameters[key] + + + # copy raw data over + path_galaxy = self._kh.get_singledish_dir_for_target(target=target, changeto=False) + os.sep + 'processing_singledish_'+target + os.sep + path_galaxy = os.path.abspath(path_galaxy) + os.sep + input_raw_data = os.path.abspath(input_raw_data) + os.sep + if not os.path.isdir(path_galaxy): + os.makedirs(path_galaxy) + for dir_to_copy in ['calibration', 'raw', 'script', 'qa']: + if os.path.isdir(os.path.join(path_galaxy, dir_to_copy)): + input_raw_data_files = glob.glob(os.path.join(input_raw_data, dir_to_copy)) + copied_raw_data_files = glob.glob(os.path.join(path_galaxy, dir_to_copy)) + if len(input_raw_data_files) > len(copied_raw_data_files): + logger.info(" cleaning up: "+str(os.path.join(path_galaxy, dir_to_copy))) + shutil.rmtree(glob.glob(os.path.join(path_galaxy, dir_to_copy))) + if not os.path.isdir(os.path.join(path_galaxy, dir_to_copy)): + logger.info(" copying raw data: "+str(os.path.join(input_raw_data, dir_to_copy))) + logger.info(" to processing dir: "+str(os.path.join(path_galaxy, dir_to_copy))) + shutil.copytree(os.path.join(input_raw_data, dir_to_copy), \ + os.path.join(path_galaxy, dir_to_copy)) - logger.info(" kwargs: "+str(kwargs)) - csdr.run_ALMA_TP_tools(**kwargs) + if self.use_legacy_pipeline: + logger.info(" Using legacy pipeline") + + product = product_list[0] + + vlow2 = product_dict[product]['vel_cube'][0] + vhigh2 = product_dict[product]['vel_cube'][1] + + vlow1 = product_dict[product]['vel_line_mask'][0] + vhigh1 = product_dict[product]['vel_line_mask'][1] + + kwargs = {} + kwargs['path_galaxy'] = path_galaxy # + 
kwargs['flag_file'] = '' # + kwargs['doplots'] = False # Do non-interactive. additional plots (plots will be saved in "calibration/plots" folder) + kwargs['bl_order'] = 1 # Order for the baseline fitting + kwargs['max_flag_frac'] = 0.9 # Remove antennae with significant amounts of flagged data + kwargs['in_source'] = source # Source name. This comes from the field name in the MS file keys + kwargs['freq_rest'] = product_dict[product]['freq_rest_MHz'] # Rest frequency of requested line in MHz (ex: "freq_rest = 230538" for CO(2-1)) + kwargs['vel_cube'] = vel_cube = '%.3f~%.3f'%(vlow2, vhigh2) # Range in velocity in km/s to extract the line cube. + kwargs['vel_line'] = vel_line = '%.3f~%.3f'%(vlow1, vhigh1) # Range in velocity in km/s to exclude the line emission from the baseline fit. + kwargs['phase_center'] = phase_center # Provide coordinate of phase center, otherwise set to "False" and coordinates will be read from the data + kwargs['source_vel_kms'] = product_dict[product]['vsys'] # Provide velocity of the source, otherwise set to "False" and coordinates will be read from the data + kwargs['vwidth_kms'] = product_dict[product]['vwidth'] # width in velocity and velocity resolution in km/s + kwargs['chan_dv_kms'] = product_dict[product]['max_chanwidth_kms'] # + kwargs['freq_rest_im'] = product_dict[product]['freq_rest_GHz'] # rest frequency in GHz for imaging + kwargs['name_line'] = product_dict[product]['line_name'] # Name of the line, to be used for naming the files -- will not be used anymore + kwargs['output_file'] = product_dict[product]['output_file'] # Output file path + #kwargs['joint_imaging_dirs'] = joint_imaging_dirs # Do a joint imaging by including *.cal.jy in joint_imaging_dir + #kwargs['joint_imaging_suffix'] = joint_imaging_suffix # Suffix after name_line in the output file name. 
+ kwargs['do_step'] = [] + + # see if there is anything defined in the config_definitions key + parameters = self._kh.get_params_for_singledish(singledish_config='tp') + + if parameters is not None: + for key in parameters: + kwargs[key] = parameters[key] + + logger.info(" kwargs: "+str(kwargs)) + + csdr.run_ALMA_TP_tools(**kwargs) - else: - # Run the modified version of the ALMA pipeline w/ custom imaging routine + else: + # Run the modified version of the ALMA pipeline w/ custom imaging routine - sdalma.runALMAPipeline(path_galaxy=path_galaxy, - in_source=fname_dict['source'][0], - baseline_fit_func='poly', - baseline_fit_order=parameters['bl_order'] if 'bl_order' in parameters else 1, - baseline_linewindowmode='replace', - baseline_linewindow=None, - product_dict=product_dict, - ) + sdalma.runALMAPipeline(path_galaxy=path_galaxy, + in_source=fname_dict['source'][0], + baseline_fit_func='poly', + baseline_fit_order=parameters['bl_order'] if 'bl_order' in parameters else 1, + baseline_linewindowmode='replace', + baseline_linewindow=None, + product_dict=product_dict, + ) -#endregion + #endregion -#region Recipes execute a set of linked tasks for one data set. + #region Recipes execute a set of linked tasks for one data set. - def recipe_process_one_target( - self, - target = None, - product = None, - ): - """ - """ + def recipe_process_one_target( + self, + target = None, + product = None, + ): + """ + """ - # Work out file names and note whether the target is part of a - # mosaic, has single dish data, etc. + # Work out file names and note whether the target is part of a + # mosaic, has single dish data, etc. 
- logger.warning('recipe_process_one_target is only used for the legacy pipeline!') + logger.warning('recipe_process_one_target is only used for the legacy pipeline!') - fname_dict = self._fname_dict( - target=target, - product=product, - ) - - if fname_dict['sd_file'] == '': - logger.info("Target "+target+" product "+product+" has no single dish data in the singledish_key file.") - return + fname_dict = self._fname_dict( + target=target, + product=product, + ) - # Call tasks + if fname_dict['sd_file'] == '': + logger.info("Target "+target+" product "+product+" has no single dish data in the singledish_key file.") + return - if len(fname_dict['sd_raw_data_list']) > 1: - logger.warning('Warning! Multiple single dish raw data entries are found in the ms_file_key! We will only process the first one! [TODO]') - ## We can only process one single dish raw data for now. Not sure how to combine those. Unless we specify line_product in the ms_file_key? + # Call tasks - for idx in range(len(fname_dict['sd_raw_data_list'])): - self.task_execute_single_dish_pipeline( - target = target, - product = product, - source = fname_dict['source'], - input_raw_data = fname_dict['sd_raw_data_list'][idx], - output_file = fname_dict['sd_file'], - ) - if idx > 0: - break + if len(fname_dict['sd_raw_data_list']) > 1: + logger.warning('Warning! Multiple single dish raw data entries are found in the ms_file_key! We will only process the first one! [TODO]') ## We can only process one single dish raw data for now. Not sure how to combine those. Unless we specify line_product in the ms_file_key? - return + for idx in range(len(fname_dict['sd_raw_data_list'])): + self.task_execute_single_dish_pipeline( + target = target, + product = product, + source = fname_dict['source'], + input_raw_data = fname_dict['sd_raw_data_list'][idx], + output_file = fname_dict['sd_file'], + ) + if idx > 0: + break + ## We can only process one single dish raw data for now. Not sure how to combine those. 
Unless we specify line_product in the ms_file_key? -#endregion + return -#region Loops + #endregion - def loop_singledish( - self, - do_all=True, - make_directories=True, - ): - """ - Loops over the full set of targets, products, and - configurations to run the postprocessing. Toggle the parts of - the loop using the do_XXX booleans. Other choices affect the - algorithms used. - """ + #region Loops - if do_all: - do_step = True + def loop_singledish( + self, + do_all=True, + make_directories=True, + ): + """ + Loops over the full set of targets, products, and + configurations to run the postprocessing. Toggle the parts of + the loop using the do_XXX booleans. Other choices affect the + algorithms used. + """ - if len(self.get_targets()) == 0: - logger.error("Need a target list.") - return(None) + if do_all: + do_step = True - if len(self.get_all_products()) == 0: - logger.error("Need a products list.") - return(None) + if len(self.get_targets()) == 0: + logger.error("Need a target list.") + return(None) - if make_directories: - self._kh.make_missing_directories(postprocess=True) + if len(self.get_all_products()) == 0: + logger.error("Need a products list.") + return(None) - # + if make_directories: + self._kh.make_missing_directories(postprocess=True) - if do_step: + # - if self.use_legacy_pipeline: - - for this_target, this_product in self.looper(do_targets=True, - do_products=True, - do_configs=False): + if do_step: - self.recipe_process_one_target( - target=this_target, - product=this_product, - ) + if self.use_legacy_pipeline: - else: - for this_target in self.get_targets(): + for this_target, this_product in self.looper(do_targets=True, + do_products=True, + do_configs=False): - self.task_execute_single_dish_pipeline( - target=this_target, - product='all', - ) + self.recipe_process_one_target( + target=this_target, + product=this_product, + ) + else: + for this_target in self.get_targets(): -#endregion + self.task_execute_single_dish_pipeline( + 
target=this_target, + product='all', + ) diff --git a/phangsPipeline/handlerTemplate.py b/phangsPipeline/handlerTemplate.py index fd4912c4..065ed54c 100644 --- a/phangsPipeline/handlerTemplate.py +++ b/phangsPipeline/handlerTemplate.py @@ -5,11 +5,10 @@ handlers and includes basic list and shared functionality. """ -import os -import glob +import logging + import numpy as np -import logging logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) diff --git a/phangsPipeline/handlerTestImaging.py b/phangsPipeline/handlerTestImaging.py index eb6dc1b2..667073ce 100644 --- a/phangsPipeline/handlerTestImaging.py +++ b/phangsPipeline/handlerTestImaging.py @@ -38,15 +38,14 @@ this_tih.plot_image_gallery(output_file='image_gallery.png') """ -import os import logging -import numpy as np +import os logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) # Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed +from .check_imports import is_casa_installed casa_enabled = is_casa_installed() if casa_enabled: diff --git a/phangsPipeline/handlerVis.py b/phangsPipeline/handlerVis.py index 732b084b..ee51a29b 100644 --- a/phangsPipeline/handlerVis.py +++ b/phangsPipeline/handlerVis.py @@ -25,1071 +25,1068 @@ """ -import os -import sys import logging +import os -from . import handlerTemplate -from . import utilsFilenames as fnames +# Check casa environment by importing CASA-only packages +from .check_imports import is_casa_installed -# Spectral lines -from . import utilsLines as lines +casa_enabled = is_casa_installed() logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed - -casa_enabled = is_casa_installed() - if casa_enabled: logger.debug('casa_enabled = True') - from . 
import casaVisRoutines as cvr - # reload(cvr) ## else: logger.debug('casa_enabled = False') +if casa_enabled: -class VisHandler(handlerTemplate.HandlerTemplate): - """ - Class to manipulate calibrated ALMA visibility data (measurement - sets), extracting lines, combining multiple data sets, and - carrying out other steps in prepration for imaging. - """ - - ############ - # __init__ # - ############ + from . import casaVisRoutines as cvr + from . import handlerTemplate + from . import utilsFilenames as fnames + from . import utilsLines as lines - def __init__(self, key_handler=None, dry_run=False): + class VisHandler(handlerTemplate.HandlerTemplate): """ + Class to manipulate calibrated ALMA visibility data (measurement + sets), extracting lines, combining multiple data sets, and + carrying out other steps in prepration for imaging. """ - # Can't use super and keep python2/3 agnostic - handlerTemplate.HandlerTemplate.__init__( - self, key_handler=key_handler, dry_run=dry_run) - - # region Loops - - ###################################### - # Loop through all steps and targets # - ###################################### - - def loop_stage_uvdata( - self, - do_all=False, - do_copy=False, - do_remove_staging=False, - do_custom=False, - do_contsub=False, - do_extract_line=False, - do_extract_cont=False, - extra_ext='', - make_directories=True, - statwt_line=True, - statwt_cont=True, - intent=None, - timebin=None, - just_projects=None, - strict_config=True, - require_full_line_coverage=False, - require_full_cont_coverage=False, - overwrite=False): - """ - Loops over the full set of targets, products, and configurations - to run the uv data processing. Toggle the parts of the loop - using the do_XXX booleans. Other choices affect the algorithms - used. - - The strict_config option sets whether to require that a target has data - from ALL arrays that make up the configuration (True) or not (False). 
- - The require_full_line_coverage option sets whether to require a - measurement set to completely cover a given line's frequency range - (True) or not (False). - """ - - if make_directories: - self._kh.make_missing_directories(imaging=True) - - if do_all: - do_copy = True - do_contsub = True - do_custom = True - do_extract_line = True - do_extract_cont = True - do_remove_staging = True - - target_list = self.get_targets() - product_list = self.get_all_products() - config_list = self.get_interf_configs() - - # Our first loop goes over the individual measurement sets, - # splits, and continuum subtracts the data. At this stage we - # have no knowledge of configs except that selection may - # reduce the number of input measurement sets. - - for this_target, this_project, this_array_tag, this_obsnum in \ - self._kh.loop_over_input_ms( - target=target_list, - config=config_list, - project=just_projects, - strict_config=strict_config): - - for this_product in product_list: - - # Our first step uses CASA's split to extract the relevant - # fields and spectral windows from each input data set. - - if do_copy: - self.task_split( - target=this_target, - project=this_project, - array_tag=this_array_tag, - obsnum=this_obsnum, - product=this_product, - intent=intent, - timebin=timebin, - require_full_line_coverage=require_full_line_coverage, - overwrite=overwrite, - ) - - # Run custom processing. Not currently used. - - if do_custom: - pass - - # Next we apply uv continuum subtraction. We may later offer - # an algorithm choice to do this before or after - # regridding. The correct choice depends on some details of - # the observation setup. - - if this_product in self._kh.get_line_products() and do_contsub: - self.task_contsub( - target=this_target, - project=this_project, - array_tag=this_array_tag, - obsnum=this_obsnum, - product=this_product, - # could add algorithm flags here - overwrite=overwrite) - # Now we reprocess the data to have the desired spectral - # setup(s). 
This involves rebinning and regridding for line - # products and flagging and integration for continuum - # products. This requires cross-talk among the different - # measurement sets. - - for this_target, this_product, this_config in \ - self.looper( - do_targets=True, - do_products=True, - do_configs=True, - just_line=True, - just_interf=True): - - if strict_config: - # this seems like it doesn't do anything - do we - # actually want a test here and if we do shouldn't - # it be checking if this is false then - # continuing? In theory this is checked above. - self._kh.has_data_for_config( - target=this_target, - config=this_config, - strict=True) - - if do_extract_line: - - if this_product in self._kh.get_line_products(): - self.task_extract_line( + ############ + # __init__ # + ############ + + def __init__(self, key_handler=None, dry_run=False): + """ + """ + # Can't use super and keep python2/3 agnostic + handlerTemplate.HandlerTemplate.__init__( + self, key_handler=key_handler, dry_run=dry_run) + + # region Loops + + ###################################### + # Loop through all steps and targets # + ###################################### + + def loop_stage_uvdata( + self, + do_all=False, + do_copy=False, + do_remove_staging=False, + do_custom=False, + do_contsub=False, + do_extract_line=False, + do_extract_cont=False, + extra_ext='', + make_directories=True, + statwt_line=True, + statwt_cont=True, + intent=None, + timebin=None, + just_projects=None, + strict_config=True, + require_full_line_coverage=False, + require_full_cont_coverage=False, + overwrite=False): + """ + Loops over the full set of targets, products, and configurations + to run the uv data processing. Toggle the parts of the loop + using the do_XXX booleans. Other choices affect the algorithms + used. + + The strict_config option sets whether to require that a target has data + from ALL arrays that make up the configuration (True) or not (False). 
+ + The require_full_line_coverage option sets whether to require a + measurement set to completely cover a given line's frequency range + (True) or not (False). + """ + + if make_directories: + self._kh.make_missing_directories(imaging=True) + + if do_all: + do_copy = True + do_contsub = True + do_custom = True + do_extract_line = True + do_extract_cont = True + do_remove_staging = True + + target_list = self.get_targets() + product_list = self.get_all_products() + config_list = self.get_interf_configs() + + # Our first loop goes over the individual measurement sets, + # splits, and continuum subtracts the data. At this stage we + # have no knowledge of configs except that selection may + # reduce the number of input measurement sets. + + for this_target, this_project, this_array_tag, this_obsnum in \ + self._kh.loop_over_input_ms( + target=target_list, + config=config_list, + project=just_projects, + strict_config=strict_config): + + for this_product in product_list: + + # Our first step uses CASA's split to extract the relevant + # fields and spectral windows from each input data set. + + if do_copy: + self.task_split( + target=this_target, + project=this_project, + array_tag=this_array_tag, + obsnum=this_obsnum, + product=this_product, + intent=intent, + timebin=timebin, + require_full_line_coverage=require_full_line_coverage, + overwrite=overwrite, + ) + + # Run custom processing. Not currently used. + + if do_custom: + pass + + # Next we apply uv continuum subtraction. We may later offer + # an algorithm choice to do this before or after + # regridding. The correct choice depends on some details of + # the observation setup. 
+ + if this_product in self._kh.get_line_products() and do_contsub: + self.task_contsub( + target=this_target, + project=this_project, + array_tag=this_array_tag, + obsnum=this_obsnum, + product=this_product, + # could add algorithm flags here + overwrite=overwrite) + + # Now we reprocess the data to have the desired spectral + # setup(s). This involves rebinning and regridding for line + # products and flagging and integration for continuum + # products. This requires cross-talk among the different + # measurement sets. + + for this_target, this_product, this_config in \ + self.looper( + do_targets=True, + do_products=True, + do_configs=True, + just_line=True, + just_interf=True): + + if strict_config: + # this seems like it doesn't do anything - do we + # actually want a test here and if we do shouldn't + # it be checking if this is false then + # continuing? In theory this is checked above. + self._kh.has_data_for_config( target=this_target, config=this_config, - product=this_product, - exact=False, - do_statwt=statwt_line, - extra_ext_in="", - contsub="prefer", - # could add algorithm flags here - require_full_line_coverage=require_full_line_coverage, - overwrite=overwrite, - strict_config=strict_config) - - for this_target, this_product, this_config in \ - self.looper( - do_targets=True, - do_products=True, - do_configs=True, - just_cont=True, - just_interf=True): - - # Same as above - check / revise - if strict_config: - self._kh.has_data_for_config( - target=this_target, - config=this_config, - strict=True) - - if do_extract_cont: - - if this_product in self._kh.get_continuum_products(): - self.task_extract_continuum( + strict=True) + + if do_extract_line: + + if this_product in self._kh.get_line_products(): + self.task_extract_line( + target=this_target, + config=this_config, + product=this_product, + exact=False, + do_statwt=statwt_line, + extra_ext_in="", + contsub="prefer", + # could add algorithm flags here + 
require_full_line_coverage=require_full_line_coverage, + overwrite=overwrite, + strict_config=strict_config) + + for this_target, this_product, this_config in \ + self.looper( + do_targets=True, + do_products=True, + do_configs=True, + just_cont=True, + just_interf=True): + + # Same as above - check / revise + if strict_config: + self._kh.has_data_for_config( target=this_target, - product=this_product, config=this_config, - exact=False, - do_statwt=statwt_cont, - require_full_cont_coverage=require_full_cont_coverage, - overwrite=overwrite, - strict_config=strict_config) - - # Clean up the staged measurement sets. They cost time to - # re-split, but have a huge disk imprint and are redundant - # with the concatenated data and original data. - - for this_target, this_project, this_array_tag, this_obsnum in \ - self._kh.loop_over_input_ms( - target=target_list, - config=config_list, - project=just_projects, - strict_config=strict_config): - - for this_product in product_list: - - if do_remove_staging: - self.remove_staged_products( - target=this_target, - project=this_project, - array_tag=this_array_tag, - obsnum=this_obsnum, - product=this_product, - strict_config=strict_config) - - return () - - # endregion - - # region Tasks - - ########################################## - # Tasks - individual operations on data. # - ########################################## - - def task_split( - self, - target=None, - project=None, - array_tag=None, - obsnum=None, - product=None, - intent=None, - extra_ext_out='', - do_statwt=False, - timebin=None, - use_symlink=True, - require_full_line_coverage=False, - overwrite=False): - """ - Copy visibility data for one target, project, array_tag, - obsnum combination from their original location to the imaging - directory for the target. Then optionally split out only the - science targets. 
- """ - - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") - - if project is None: - logger.error("Please specify a project.") - raise Exception("Please specify a project.") - - if array_tag is None: - logger.error("Please specify an array_tag.") - raise Exception("Please specify an array_tag.") - - if obsnum is None: - logger.error("Please specify an obsnum.") - raise Exception("Please specify an obsnum.") - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Splitting u-v data for") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - infile = self._kh.get_file_for_input_ms( - target=target, project=project, array_tag=array_tag, obsnum=obsnum) - - logger.info("... file: " + infile) - - if infile is None: - logger.error("Infile not found. Returning.") - return () + strict=True) + + if do_extract_cont: + + if this_product in self._kh.get_continuum_products(): + self.task_extract_continuum( + target=this_target, + product=this_product, + config=this_config, + exact=False, + do_statwt=statwt_cont, + require_full_cont_coverage=require_full_cont_coverage, + overwrite=overwrite, + strict_config=strict_config) + + # Clean up the staged measurement sets. They cost time to + # re-split, but have a huge disk imprint and are redundant + # with the concatenated data and original data. + + for this_target, this_project, this_array_tag, this_obsnum in \ + self._kh.loop_over_input_ms( + target=target_list, + config=config_list, + project=just_projects, + strict_config=strict_config): + + for this_product in product_list: + + if do_remove_staging: + self.remove_staged_products( + target=this_target, + project=this_project, + array_tag=this_array_tag, + obsnum=this_obsnum, + product=this_product, + strict_config=strict_config) - if not os.path.isdir(infile): - logger.error("Infile not found. 
Returning.") return () - field = self._kh.get_field_for_input_ms( - target=target, project=project, array_tag=array_tag, obsnum=obsnum) - if (field.lower()).strip() == 'all': - field = '' - - if intent is None: - intent='OBSERVE_TARGET*' - - outfile = fnames.get_staged_msname( - target=target, project=project, array_tag=array_tag, obsnum=obsnum, - product=product, ext=extra_ext_out) - - logger.info("... output: " + outfile) - - # Check existence of output data and abort if found and overwrite is off + # endregion + + # region Tasks + + ########################################## + # Tasks - individual operations on data. # + ########################################## + + def task_split( + self, + target=None, + project=None, + array_tag=None, + obsnum=None, + product=None, + intent=None, + extra_ext_out='', + do_statwt=False, + timebin=None, + use_symlink=True, + require_full_line_coverage=False, + overwrite=False): + """ + Copy visibility data for one target, project, array_tag, + obsnum combination from their original location to the imaging + directory for the target. Then optionally split out only the + science targets. + """ + + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") + + if project is None: + logger.error("Please specify a project.") + raise Exception("Please specify a project.") + + if array_tag is None: + logger.error("Please specify an array_tag.") + raise Exception("Please specify an array_tag.") + + if obsnum is None: + logger.error("Please specify an obsnum.") + raise Exception("Please specify an obsnum.") + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Splitting u-v data for") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + infile = self._kh.get_file_for_input_ms( + target=target, project=project, array_tag=array_tag, obsnum=obsnum) + + logger.info("... file: " + infile) + + if infile is None: + logger.error("Infile not found. 
Returning.") + return () - if os.path.isdir(outfile) and not os.path.isdir(outfile + '.touch'): - if not overwrite: - logger.warning('... found existing output data "' + outfile + '", will not overwrite it.') + if not os.path.isdir(infile): + logger.error("Infile not found. Returning.") return () - # If the user doesn't override the time bin, get it from the - # key handler. + field = self._kh.get_field_for_input_ms( + target=target, project=project, array_tag=array_tag, obsnum=obsnum) + if (field.lower()).strip() == 'all': + field = '' - if timebin is None: - timebin = self._kh.get_timebin_for_array_tag(array_tag=array_tag) - logger.info("... timebin: " + str(timebin)) + if intent is None: + intent='OBSERVE_TARGET*' - # If requested, select on SPW for the product + outfile = fnames.get_staged_msname( + target=target, project=project, array_tag=array_tag, obsnum=obsnum, + product=product, ext=extra_ext_out) - spw = '' - if product is not None: + logger.info("... output: " + outfile) - logger.info("... product: " + str(product)) + # Check existence of output data and abort if found and overwrite is off - if product in self._kh.get_line_products(): + if os.path.isdir(outfile) and not os.path.isdir(outfile + '.touch'): + if not overwrite: + logger.warning('... found existing output data "' + outfile + '", will not overwrite it.') + return () - this_line = self._kh.get_line_tag_for_line_product(product) - vsys, vwidth = \ - self._kh.get_system_velocity_and_velocity_width_for_target( - target, check_parent=False) - max_chanwidth_kms = \ - self._kh.get_channel_width_for_line_product(product) + # If the user doesn't override the time bin, get it from the + # key handler. - combinespw = self._kh.get_contsub_combinespw(product=product) - if combinespw is None: - combinespw = False + if timebin is None: + timebin = self._kh.get_timebin_for_array_tag(array_tag=array_tag) + logger.info("... timebin: " + str(timebin)) - logger.info("... 
combinespw: " + str(combinespw)) + # If requested, select on SPW for the product - if not self._dry_run: - if combinespw: - spw = cvr.find_spws_for_science(infile=infile) - else: - spw = cvr.find_spws_for_line( - infile=infile, line=this_line, - max_chanwidth_kms=max_chanwidth_kms, - vsys_kms=vsys, vwidth_kms=vwidth, - require_full_line_coverage=require_full_line_coverage) + spw = '' + if product is not None: - if spw is None or len(spw) == 0: - logger.warning( - "... No SPWs meet the selection criteria. " - "Skipping.") - - return () - - if product in self._kh.get_continuum_products(): - spw = cvr.find_spws_for_science(infile=infile) - - logger.info("... extracting spws :" + str(spw)) - - # Change to the imaging directory for the target - - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - - if not self._dry_run: - cvr.split_science_targets( - infile=infile, - outfile=outfile, - field=field, - intent=intent, - spw=spw, - timebin=timebin, - do_statwt=do_statwt, - overwrite=overwrite) - - return () - - def remove_staged_products( - self, - target=None, - project=None, - array_tag=None, - obsnum=None, - product=None, - extra_ext='', - strict_config=True): - """ - Remove 'staged' visibility products, which are intermediate - between the calibrated data and the concated measurement sets - that we begin processing on. Run this step after concat to - reduct the disk footprint of the pipeline. - """ - - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") + logger.info("... 
product: " + str(product)) - if project is None: - logger.error("Please specify a project.") - raise Exception("Please specify a project.") + if product in self._kh.get_line_products(): - if array_tag is None: - logger.error("Please specify an array_tag.") - raise Exception("Please specify an array_tag.") + this_line = self._kh.get_line_tag_for_line_product(product) + vsys, vwidth = \ + self._kh.get_system_velocity_and_velocity_width_for_target( + target, check_parent=False) + max_chanwidth_kms = \ + self._kh.get_channel_width_for_line_product(product) - if obsnum is None: - logger.error("Please specify an obsnum.") - raise Exception("Please specify an obsnum.") + combinespw = self._kh.get_contsub_combinespw(product=product) + if combinespw is None: + combinespw = False - infile = fnames.get_staged_msname( - target=target, project=project, array_tag=array_tag, obsnum=obsnum, - product=product, ext=extra_ext) + logger.info("... combinespw: " + str(combinespw)) - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Clearing intermediate staged u-v data for " + infile) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") + if not self._dry_run: + if combinespw: + spw = cvr.find_spws_for_science(infile=infile) + else: + spw = cvr.find_spws_for_line( + infile=infile, line=this_line, + max_chanwidth_kms=max_chanwidth_kms, + vsys_kms=vsys, vwidth_kms=vwidth, + require_full_line_coverage=require_full_line_coverage) - # Change to the imaging directory for the target + if spw is None or len(spw) == 0: + logger.warning( + "... No SPWs meet the selection criteria. 
" + "Skipping.") - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + return () - if not self._dry_run: - os.system('rm -rf ' + infile) - os.system('rm -rf ' + infile + '.contsub') + if product in self._kh.get_continuum_products(): + spw = cvr.find_spws_for_science(infile=infile) - return () - - def task_concat_uvdata( - self, - target=None, - product=None, - config=None, - just_projects=None, - extra_ext_in='', - extra_ext_out='', - overwrite=False, - strict_config=True): - """ - Concatenate all measurement sets for the supplied - target+config+product combination. - """ + logger.info("... extracting spws :" + str(spw)) - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") + # Change to the imaging directory for the target - if product is None: - logger.error("Please specify a product.") - raise Exception("Please specify a product.") + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - if config is None: - logger.error("Please specify a config.") - raise Exception("Please specify a config.") - - # Change to the imaging directory for the target + if not self._dry_run: + cvr.split_science_targets( + infile=infile, + outfile=outfile, + field=field, + intent=intent, + spw=spw, + timebin=timebin, + do_statwt=do_statwt, + overwrite=overwrite) - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + return () - # Generate the list of staged measurement sets to combine + def remove_staged_products( + self, + target=None, + project=None, + array_tag=None, + obsnum=None, + product=None, + extra_ext='', + strict_config=True): + """ + Remove 'staged' visibility products, which are intermediate + between the calibrated data and the concated measurement sets + that we begin processing on. Run this step after concat to + reduct the disk footprint of the pipeline. 
+ """ + + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") + + if project is None: + logger.error("Please specify a project.") + raise Exception("Please specify a project.") + + if array_tag is None: + logger.error("Please specify an array_tag.") + raise Exception("Please specify an array_tag.") + + if obsnum is None: + logger.error("Please specify an obsnum.") + raise Exception("Please specify an obsnum.") + + infile = fnames.get_staged_msname( + target=target, project=project, array_tag=array_tag, obsnum=obsnum, + product=product, ext=extra_ext) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Clearing intermediate staged u-v data for " + infile) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + # Change to the imaging directory for the target + + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - staged_ms_list = [] - for this_target, this_project, this_array_tag, this_obsnum in \ - self._kh.loop_over_input_ms( - target=target, - config=config, - project=just_projects, - strict_config=strict_config): + if not self._dry_run: + os.system('rm -rf ' + infile) + os.system('rm -rf ' + infile + '.contsub') - this_staged_ms = fnames.get_staged_msname( - target=this_target, project=this_project, - array_tag=this_array_tag, obsnum=this_obsnum, product=product, - ext=extra_ext_in) - if os.path.isdir(this_staged_ms): - staged_ms_list.append(this_staged_ms) - else: - logger.warning( - "MS not found and will be dropped from concat: " + - str(this_staged_ms)) - logger.warning("This might or might not be a problem.") - - if len(staged_ms_list) == 0: - logger.warning("No measurement sets to concatenate, returning.") return () - # Generate the outfile name - - outfile = fnames.get_vis_filename( - target=target, config=config, product=product, - ext=extra_ext_out, suffix=None) - - # Revise here - - logger.info("") - 
logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Concatenating staged and split u-v data for:") - logger.info("... target: " + target) - logger.info("... product: " + product) - logger.info("... config: " + config) - logger.info("... files: " + str(staged_ms_list)) - logger.info("... output: " + str(outfile)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - # Concatenate the measurement sets - - if not self._dry_run: - cvr.concat_ms( - infile_list=staged_ms_list, - outfile=outfile, - overwrite=overwrite, - copypointing=False) # come back later - - return () - - def task_contsub( - self, - target=None, - project=None, - array_tag=None, - obsnum=None, - product=None, - extra_ext_in='', - overwrite=False): - """ - Run u-v plane continuum subtraction on an individual input - measurement set. - """ - - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") - - if project is None: - logger.error("Please specify a project.") - raise Exception("Please specify a project.") - - if array_tag is None: - logger.error("Please specify an array_tag.") - raise Exception("Please specify an array_tag.") - - if obsnum is None: - logger.error("Please specify an obsnum.") - raise Exception("Please specify an obsnum.") - - infile = fnames.get_staged_msname( - target=target, project=project, - array_tag=array_tag, obsnum=obsnum, product=product, - ext=extra_ext_in) - - # get target vsys and vwidth - # if part of linear mosaic, use vsys, vwidth of the parent target - vsys, vwidth = \ - self._kh.get_system_velocity_and_velocity_width_for_target( - target, check_parent=True) - - # Get lines to exclude. 
- - lines_to_exclude = self._kh.get_lines_to_flag(product=product) - this_line_tag = self._kh.get_line_tag_for_line_product(product) - if len(lines_to_exclude) == 0: - lines_to_exclude = [this_line_tag] - - # Translate these into frequency ranges - - ranges_to_exclude = lines.get_ghz_range_for_list( - line_list=lines_to_exclude, vsys_kms=vsys, vwidth_kms=vwidth) - - # Check for manually defined frequency windows: - manual_range_to_exclude = self._kh.get_contsub_excludefreqrange(product=product) - if manual_range_to_exclude is not None: - # This needs to incorporate it into the range, and should look for overlap - # to extend the already excluded region. - - distinct_ranges = [] - - for this_range in manual_range_to_exclude: - if not len(this_range) == 2: - raise ValueError("Parameter `exclude_freq_ranges_ghz` in target_definitions.txt must be a list" - " of 2 element lists with a low and high frequency. Given: "+str(this_range)) - freq_low = min(this_range) - freq_high = max(this_range) - - # NOTE: this assumes only 1 defined are from ranges_to_exclude - existfreq_low = min(ranges_to_exclude[0]) - existfreq_high = max(ranges_to_exclude[0]) - - # Does this fall completely within the existing range for a single target. - if existfreq_low <= freq_low and existfreq_high >= freq_high: - # No need to adjust. Keep original - distinct_ranges.append(list(ranges_to_exclude[0])) - continue - # Is it completely separate? - elif existfreq_low >= freq_high or existfreq_high <= freq_low: - # Include the original and new - distinct_ranges.append(list(ranges_to_exclude[0])) - distinct_ranges.append(this_range) - # Does it extend the lower side? - elif existfreq_low >= freq_low and existfreq_high >= freq_high: - distinct_ranges.append([freq_low, existfreq_high]) - # Does it extend the upper side? - elif existfreq_low <= freq_low and existfreq_high <= freq_high: - distinct_ranges.append([existfreq_low, freq_high]) - # Does it completely enclose the range? 
- elif existfreq_low >= freq_low and existfreq_high <= freq_high: - distinct_ranges.append([freq_low, freq_high]) - - # Reduce the range list to unique ranges. - ranges_to_exclude = list(set([tuple(this_range) for this_range in distinct_ranges])) - - # Query the keyhandler for the details of continuum subtraction - - fitorder = self._kh.get_contsub_fitorder(product=product) - if fitorder is None: - fitorder = 0 - - combinespw = self._kh.get_contsub_combinespw(product=product) - if combinespw is None: - combinespw = False - - combine = '' - if combinespw: - combine = 'spw' - - flag_edge_fraction = self._kh.get_contsub_flagedgefraction(product=product) - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("u-v continuum subtraction for") - logger.info("... file: " + infile) - logger.info("... output: " + infile + '.contsub') - logger.info("... excluding frequency ranges: " + str(ranges_to_exclude)) - logger.info("... fitorder: " + str(fitorder)) - logger.info("... combine: " + str(combine)) - logger.info("... 
flag_edge_fraction: " + str(flag_edge_fraction)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - # Change to the imaging directory for the target - - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - - if not self._dry_run: - cvr.contsub( - infile=infile, - # outfile is not an option right now, comes out ".contsub" - ranges_to_exclude=ranges_to_exclude, - overwrite=overwrite, - fitorder=fitorder, - combine=combine, - flag_edge_fraction=flag_edge_fraction) - - return () - - def task_run_custom_scripts( - self, - target=None, - product=None, - config=None, - extra_ext=''): - """ - """ - pass - - def task_extract_line( - self, - target=None, - product=None, - config=None, - exact=False, - contsub="prefer", - extra_ext_in='', - extra_ext_out='', - do_statwt=True, - edge_for_statwt=None, - method="regrid_then_rebin", - require_full_line_coverage=False, - overwrite=False, - strict_config=True): - """ - Extract spectral line data from ms data for the input target, - config and product. - """ - - # Error checking + def task_concat_uvdata( + self, + target=None, + product=None, + config=None, + just_projects=None, + extra_ext_in='', + extra_ext_out='', + overwrite=False, + strict_config=True): + """ + Concatenate all measurement sets for the supplied + target+config+product combination. 
+ """ + + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") + + if product is None: + logger.error("Please specify a product.") + raise Exception("Please specify a product.") + + if config is None: + logger.error("Please specify a config.") + raise Exception("Please specify a config.") + + # Change to the imaging directory for the target + + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + + # Generate the list of staged measurement sets to combine + + staged_ms_list = [] + for this_target, this_project, this_array_tag, this_obsnum in \ + self._kh.loop_over_input_ms( + target=target, + config=config, + project=just_projects, + strict_config=strict_config): + + this_staged_ms = fnames.get_staged_msname( + target=this_target, project=this_project, + array_tag=this_array_tag, obsnum=this_obsnum, product=product, + ext=extra_ext_in) + if os.path.isdir(this_staged_ms): + staged_ms_list.append(this_staged_ms) + else: + logger.warning( + "MS not found and will be dropped from concat: " + + str(this_staged_ms)) + logger.warning("This might or might not be a problem.") - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") + if len(staged_ms_list) == 0: + logger.warning("No measurement sets to concatenate, returning.") + return () - if product is None: - logger.error("Please specify a product.") - raise Exception("Please specify a product.") + # Generate the outfile name - if config is None: - logger.error("Please specify a config.") - raise Exception("Please specify a config.") + outfile = fnames.get_vis_filename( + target=target, config=config, product=product, + ext=extra_ext_out, suffix=None) - # If the user wants statwt but doesn't override the edge - # value, get it from the key handler. 
+ # Revise here - if do_statwt: - if edge_for_statwt is None: - edge_for_statwt = \ - self._kh.get_statwt_edge_for_line_product(product=product) + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Concatenating staged and split u-v data for:") + logger.info("... target: " + target) + logger.info("... product: " + product) + logger.info("... config: " + config) + logger.info("... files: " + str(staged_ms_list)) + logger.info("... output: " + str(outfile)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") - # Option re: continuum subtraction + # Concatenate the measurement sets - valid_contsub_options = ['prefer', 'require', 'none'] - if contsub.lower().strip() not in valid_contsub_options: - logger.error("Please choose a valid contsub option.") - logger.error("Valid options are:" + str(valid_contsub_options)) - raise Exception("Please choose a valid contsub option.") + if not self._dry_run: + cvr.concat_ms( + infile_list=staged_ms_list, + outfile=outfile, + overwrite=overwrite, + copypointing=False) # come back later - # Compile a list of input files, looping over the staged - # measurement sets. + return () - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + def task_contsub( + self, + target=None, + project=None, + array_tag=None, + obsnum=None, + product=None, + extra_ext_in='', + overwrite=False): + """ + Run u-v plane continuum subtraction on an individual input + measurement set. 
+ """ + + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") + + if project is None: + logger.error("Please specify a project.") + raise Exception("Please specify a project.") + + if array_tag is None: + logger.error("Please specify an array_tag.") + raise Exception("Please specify an array_tag.") + + if obsnum is None: + logger.error("Please specify an obsnum.") + raise Exception("Please specify an obsnum.") + + infile = fnames.get_staged_msname( + target=target, project=project, + array_tag=array_tag, obsnum=obsnum, product=product, + ext=extra_ext_in) - logger.debug('') - logger.debug('task_extract_line') - logger.debug('loop_over_input_ms') - logger.debug('target=%s' % (str([target]))) - logger.debug('config=%s' % (str([config]))) - # # we have not excluded the combined interf config '12m+7m' + # get target vsys and vwidth + # if part of linear mosaic, use vsys, vwidth of the parent target + vsys, vwidth = \ + self._kh.get_system_velocity_and_velocity_width_for_target( + target, check_parent=True) + + # Get lines to exclude. + + lines_to_exclude = self._kh.get_lines_to_flag(product=product) + this_line_tag = self._kh.get_line_tag_for_line_product(product) + if len(lines_to_exclude) == 0: + lines_to_exclude = [this_line_tag] + + # Translate these into frequency ranges + + ranges_to_exclude = lines.get_ghz_range_for_list( + line_list=lines_to_exclude, vsys_kms=vsys, vwidth_kms=vwidth) + + # Check for manually defined frequency windows: + manual_range_to_exclude = self._kh.get_contsub_excludefreqrange(product=product) + if manual_range_to_exclude is not None: + # This needs to incorporate it into the range, and should look for overlap + # to extend the already excluded region. 
+ + distinct_ranges = [] + + for this_range in manual_range_to_exclude: + if not len(this_range) == 2: + raise ValueError("Parameter `exclude_freq_ranges_ghz` in target_definitions.txt must be a list" + " of 2 element lists with a low and high frequency. Given: "+str(this_range)) + freq_low = min(this_range) + freq_high = max(this_range) + + # NOTE: this assumes only 1 defined are from ranges_to_exclude + existfreq_low = min(ranges_to_exclude[0]) + existfreq_high = max(ranges_to_exclude[0]) + + # Does this fall completely within the existing range for a single target. + if existfreq_low <= freq_low and existfreq_high >= freq_high: + # No need to adjust. Keep original + distinct_ranges.append(list(ranges_to_exclude[0])) + continue + # Is it completely separate? + elif existfreq_low >= freq_high or existfreq_high <= freq_low: + # Include the original and new + distinct_ranges.append(list(ranges_to_exclude[0])) + distinct_ranges.append(this_range) + # Does it extend the lower side? + elif existfreq_low >= freq_low and existfreq_high >= freq_high: + distinct_ranges.append([freq_low, existfreq_high]) + # Does it extend the upper side? + elif existfreq_low <= freq_low and existfreq_high <= freq_high: + distinct_ranges.append([existfreq_low, freq_high]) + # Does it completely enclose the range? + elif existfreq_low >= freq_low and existfreq_high <= freq_high: + distinct_ranges.append([freq_low, freq_high]) + + # Reduce the range list to unique ranges. 
+ ranges_to_exclude = list(set([tuple(this_range) for this_range in distinct_ranges])) + + # Query the keyhandler for the details of continuum subtraction + + fitorder = self._kh.get_contsub_fitorder(product=product) + if fitorder is None: + fitorder = 0 + + combinespw = self._kh.get_contsub_combinespw(product=product) + if combinespw is None: + combinespw = False + + combine = '' + if combinespw: + combine = 'spw' + + flag_edge_fraction = self._kh.get_contsub_flagedgefraction(product=product) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("u-v continuum subtraction for") + logger.info("... file: " + infile) + logger.info("... output: " + infile + '.contsub') + logger.info("... excluding frequency ranges: " + str(ranges_to_exclude)) + logger.info("... fitorder: " + str(fitorder)) + logger.info("... combine: " + str(combine)) + logger.info("... flag_edge_fraction: " + str(flag_edge_fraction)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + # Change to the imaging directory for the target + + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - infile_dict = {} - for this_target, this_project, this_array_tag, this_obsnum in \ - self._kh.loop_over_input_ms(target=[target], - config=[config], - project=None, - strict_config=strict_config): + if not self._dry_run: + cvr.contsub( + infile=infile, + # outfile is not an option right now, comes out ".contsub" + ranges_to_exclude=ranges_to_exclude, + overwrite=overwrite, + fitorder=fitorder, + combine=combine, + flag_edge_fraction=flag_edge_fraction) - # The name of the staged measurement set with this - # combination of target, project, array, obsnum. 
+ return () - this_infile = fnames.get_staged_msname( - target=this_target, project=this_project, - array_tag=this_array_tag, obsnum=this_obsnum, - product=product, ext=extra_ext_in) + def task_run_custom_scripts( + self, + target=None, + product=None, + config=None, + extra_ext=''): + """ + """ + pass + + def task_extract_line( + self, + target=None, + product=None, + config=None, + exact=False, + contsub="prefer", + extra_ext_in='', + extra_ext_out='', + do_statwt=True, + edge_for_statwt=None, + method="regrid_then_rebin", + require_full_line_coverage=False, + overwrite=False, + strict_config=True): + """ + Extract spectral line data from ms data for the input target, + config and product. + """ + + # Error checking + + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") + + if product is None: + logger.error("Please specify a product.") + raise Exception("Please specify a product.") + + if config is None: + logger.error("Please specify a config.") + raise Exception("Please specify a config.") + + # If the user wants statwt but doesn't override the edge + # value, get it from the key handler. - # Check for existence of original data and continuum - # subtraction. + if do_statwt: + if edge_for_statwt is None: + edge_for_statwt = \ + self._kh.get_statwt_edge_for_line_product(product=product) + + # Option re: continuum subtraction + + valid_contsub_options = ['prefer', 'require', 'none'] + if contsub.lower().strip() not in valid_contsub_options: + logger.error("Please choose a valid contsub option.") + logger.error("Valid options are:" + str(valid_contsub_options)) + raise Exception("Please choose a valid contsub option.") + + # Compile a list of input files, looping over the staged + # measurement sets. 
+ + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + + logger.debug('') + logger.debug('task_extract_line') + logger.debug('loop_over_input_ms') + logger.debug('target=%s' % (str([target]))) + logger.debug('config=%s' % (str([config]))) + # # we have not excluded the combined interf config '12m+7m' + + infile_dict = {} + for this_target, this_project, this_array_tag, this_obsnum in \ + self._kh.loop_over_input_ms(target=[target], + config=[config], + project=None, + strict_config=strict_config): + + # The name of the staged measurement set with this + # combination of target, project, array, obsnum. + + this_infile = fnames.get_staged_msname( + target=this_target, project=this_project, + array_tag=this_array_tag, obsnum=this_obsnum, + product=product, ext=extra_ext_in) + + # Check for existence of original data and continuum + # subtraction. + + infile_dict[this_infile] = {} + infile_dict[this_infile]['present'] = \ + os.path.isdir(this_infile) + infile_dict[this_infile]['contsub'] = \ + os.path.isdir(this_infile + '.contsub') + + # Implement the logic related to continuum + # subtraction. Options are "require" (use only data with + # continuum subtraction), "prefer" (if any data are missing + # continuum subtraction but are present then skip continuum + # subtraction), or "none" (use original data). + + infile_list = [] + + if contsub == 'prefer': + + all_have_contsub = True + + for this_infile in infile_dict.keys(): + if not infile_dict[this_infile]['present']: + continue + if not infile_dict[this_infile]['contsub']: + all_have_contsub = False + + if all_have_contsub: + logger.info( + "All files have continuum subtraction. Using that.") + contsub = 'require' + else: + logger.info( + "Some files missing continuum subtraction. 
Skipping.") + contsub = 'none' - infile_dict[this_infile] = {} - infile_dict[this_infile]['present'] = \ - os.path.isdir(this_infile) - infile_dict[this_infile]['contsub'] = \ - os.path.isdir(this_infile + '.contsub') + if contsub == 'require': - # Implement the logic related to continuum - # subtraction. Options are "require" (use only data with - # continuum subtraction), "prefer" (if any data are missing - # continuum subtraction but are present then skip continuum - # subtraction), or "none" (use original data). + logger.warning(infile_dict) - infile_list = [] + for this_infile in infile_dict.keys(): + if infile_dict[this_infile]['contsub']: + infile_list.append(this_infile + '.contsub') + logger.warning("In file: {}".format(this_infile)) + else: + logger.warning( + "File lacks contsub, skipping: " + str(this_infile)) - if contsub == 'prefer': + if contsub == 'none': - all_have_contsub = True + for this_infile in infile_dict.keys(): + if infile_dict[this_infile]['present']: + infile_list.append(this_infile) + else: + logger.warning("File missing, skipping: " + str(this_infile)) - for this_infile in infile_dict.keys(): - if not infile_dict[this_infile]['present']: - continue - if not infile_dict[this_infile]['contsub']: - all_have_contsub = False + if len(infile_list) == 0: + logger.warning("No files to process.") + return () - if all_have_contsub: - logger.info( - "All files have continuum subtraction. Using that.") - contsub = 'require' - else: - logger.info( - "Some files missing continuum subtraction. Skipping.") - contsub = 'none' + # Define the output file. Line extraction has a concatenation + # step, so the individual measurement sets will be combined + # after extraction. 
- if contsub == 'require': + outfile = fnames.get_vis_filename( + target=target, config=config, product=product, + ext=extra_ext_out, suffix=None) + + # Extract the spectral information needed for the regrid + + vsys_kms, vwidth_kms = \ + self._kh.get_system_velocity_and_velocity_width_for_target( + target, check_parent=False) + line_to_extract = self._kh.get_line_tag_for_line_product(product) + + valid_methods = [ + 'regrid_then_rebin', 'rebin_then_regrid', + 'just_regrid', 'just_rebin'] + if method.lower().strip() not in valid_methods: + logger.error("Not a valid line extraction method - " + str(method)) + raise Exception("Please specify a valid line extraction method.") + + target_chanwidth = self._kh.get_channel_width_for_line_product(product) + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Extracting spectral product:") + logger.info("... Line: " + str(line_to_extract)) + logger.info("... Vsys [km/s]: " + str(vsys_kms)) + logger.info("... Vwidth [km/s]: " + str(vwidth_kms)) + logger.info("... Method: " + str(method)) + logger.info("... From files: " + str(infile_list)) + logger.info("... To file: " + str(outfile)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") + + # Check existence of output data and abort if found and overwrite is off + + if os.path.isdir(outfile) and not os.path.isdir(outfile + '.touch'): + if not overwrite: + logger.warning('... 
found existing output data "' + outfile + '", will not overwrite it.') + return () - logger.warning(infile_dict) + if not self._dry_run: - for this_infile in infile_dict.keys(): - if infile_dict[this_infile]['contsub']: - infile_list.append(this_infile + '.contsub') - logger.warning("In file: {}".format(this_infile)) - else: - logger.warning( - "File lacks contsub, skipping: " + str(this_infile)) + cvr.batch_extract_line( + infile_list=infile_list, + outfile=outfile, + target_chan_kms=target_chanwidth, + line=line_to_extract, + vsys_kms=vsys_kms, vwidth_kms=vwidth_kms, + method=method, + exact=exact, + overwrite=overwrite, + clear_pointing=False, + require_full_line_coverage=require_full_line_coverage) + + if do_statwt: + cvr.reweight_data( + infile=outfile, + edge_kms=edge_for_statwt, + overwrite=overwrite) - if contsub == 'none': + return () + def task_extract_continuum( + self, + target=None, + product=None, + config=None, + exact=False, + extra_ext_in='', + extra_ext_out='', + do_statwt=True, + method="regrid_then_rebin", + require_full_cont_coverage=False, + overwrite=False, + strict_config=True): + """ + Extract continuum data from ms data for the input target, config, + and product. 
+ """ + + # Error checking + + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") + + if product is None: + logger.error("Please specify a product.") + raise Exception("Please specify a product.") + + if config is None: + logger.error("Please specify a config.") + raise Exception("Please specify a config.") + + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + + logger.debug('') + logger.debug('task_extract_continuum') + logger.debug('loop_over_input_ms') + logger.debug('target=%s' % (str([target]))) + logger.debug('config=%s' % (str([config]))) + logger.debug('product=%s' % (product)) + # # we have not excluded the combined interf config '12m+7m' + + infile_dict = {} + for this_target, this_project, this_array_tag, this_obsnum in \ + self._kh.loop_over_input_ms(target=[target], + config=[config], + project=None, + strict_config=strict_config): + # The name of the staged measurement set with this + # combination of target, project, array, obsnum. + + this_infile = fnames.get_staged_msname( + target=this_target, project=this_project, + array_tag=this_array_tag, obsnum=this_obsnum, + product=product, ext=extra_ext_in) + + # Check for existence of original data and continuum + # subtraction. + + infile_dict[this_infile] = {} + infile_dict[this_infile]['present'] = \ + os.path.isdir(this_infile) + + # If no ms data found for the given target, then just return. + # This could happen if the target name is a mosaic target, + # and each ms data will be named by the mosaic parts. + + if len(infile_dict) == 0: + return + + infile_list = [] for this_infile in infile_dict.keys(): if infile_dict[this_infile]['present']: infile_list.append(this_infile) - else: - logger.warning("File missing, skipping: " + str(this_infile)) - - if len(infile_list) == 0: - logger.warning("No files to process.") - return () - # Define the output file. 
Line extraction has a concatenation - # step, so the individual measurement sets will be combined - # after extraction. - - outfile = fnames.get_vis_filename( - target=target, config=config, product=product, - ext=extra_ext_out, suffix=None) - - # Extract the spectral information needed for the regrid - - vsys_kms, vwidth_kms = \ - self._kh.get_system_velocity_and_velocity_width_for_target( - target, check_parent=False) - line_to_extract = self._kh.get_line_tag_for_line_product(product) - - valid_methods = [ - 'regrid_then_rebin', 'rebin_then_regrid', - 'just_regrid', 'just_rebin'] - if method.lower().strip() not in valid_methods: - logger.error("Not a valid line extraction method - " + str(method)) - raise Exception("Please specify a valid line extraction method.") - - target_chanwidth = self._kh.get_channel_width_for_line_product(product) - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Extracting spectral product:") - logger.info("... Line: " + str(line_to_extract)) - logger.info("... Vsys [km/s]: " + str(vsys_kms)) - logger.info("... Vwidth [km/s]: " + str(vwidth_kms)) - logger.info("... Method: " + str(method)) - logger.info("... From files: " + str(infile_list)) - logger.info("... To file: " + str(outfile)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - # Check existence of output data and abort if found and overwrite is off - - if os.path.isdir(outfile) and not os.path.isdir(outfile + '.touch'): - if not overwrite: - logger.warning('... 
found existing output data "' + outfile + '", will not overwrite it.') - return () + # Note that there is a concatenation step - if not self._dry_run: - - cvr.batch_extract_line( - infile_list=infile_list, - outfile=outfile, - target_chan_kms=target_chanwidth, - line=line_to_extract, - vsys_kms=vsys_kms, vwidth_kms=vwidth_kms, - method=method, - exact=exact, - overwrite=overwrite, - clear_pointing=False, - require_full_line_coverage=require_full_line_coverage) + outfile = fnames.get_vis_filename( + target=target, config=config, product=product, + ext=extra_ext_out, suffix=None) + + # Extract necessary information for flagging lines + # get target vsys and vwidth use the parent vsys, vwidth + # when part of a linear mosaic. useful when spectral chunks are defined + vsys_kms, vwidth_kms = \ + self._kh.get_system_velocity_and_velocity_width_for_target( + target, check_parent=True) + lines_to_flag = self._kh.get_lines_to_flag(product=product) + + # Extract the spectral information needed for the regrid/rebin + ranges_to_extract = self._kh.get_freq_ranges_for_cont_product(product) + target_chanwidth = self._kh.get_channel_width_for_cont_product(product) + + valid_methods = [ + 'regrid_then_rebin', 'rebin_then_regrid', + 'just_regrid', 'just_rebin'] + if method.lower().strip() not in valid_methods: + logger.error( + "Not a valid continuum extraction method - " + str(method)) + raise Exception( + "Please specify a valid continuum extraction method.") + + logger.info("") + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("Extracting continuum product:") + logger.info("... Extracting ranges: " + str(ranges_to_extract)) + logger.info("... Lines to flag: " + str(lines_to_flag)) + logger.info("... Target channel width: " + str(target_chanwidth)) + logger.info("... Method: " + str(method)) + logger.info("... From files: " + str(infile_list)) + logger.info("... 
To file: " + str(outfile)) + logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") + logger.info("") - if do_statwt: - cvr.reweight_data( - infile=outfile, - edge_kms=edge_for_statwt, - overwrite=overwrite) + if not self._dry_run: - return () - - def task_extract_continuum( - self, - target=None, - product=None, - config=None, - exact=False, - extra_ext_in='', - extra_ext_out='', - do_statwt=True, - method="regrid_then_rebin", - require_full_cont_coverage=False, - overwrite=False, - strict_config=True): - """ - Extract continuum data from ms data for the input target, config, - and product. - """ + cvr.batch_extract_continuum( + infile_list=infile_list, + outfile=outfile, + ranges_to_extract=ranges_to_extract, + target_chan_ghz=target_chanwidth, + lines_to_flag=lines_to_flag, + vsys_kms=vsys_kms, + vwidth_kms=vwidth_kms, + method=method, + exact=exact, + overwrite=overwrite, + clear_pointing=False, + require_full_cont_coverage=require_full_cont_coverage) + + if do_statwt: + # not sure if we need this here + pass - # Error checking - - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") - - if product is None: - logger.error("Please specify a product.") - raise Exception("Please specify a product.") - - if config is None: - logger.error("Please specify a config.") - raise Exception("Please specify a config.") - - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - - logger.debug('') - logger.debug('task_extract_continuum') - logger.debug('loop_over_input_ms') - logger.debug('target=%s' % (str([target]))) - logger.debug('config=%s' % (str([config]))) - logger.debug('product=%s' % (product)) - # # we have not excluded the combined interf config '12m+7m' - - infile_dict = {} - for this_target, this_project, this_array_tag, this_obsnum in \ - self._kh.loop_over_input_ms(target=[target], - config=[config], - project=None, - strict_config=strict_config): - # The name of the staged measurement set with this - # 
combination of target, project, array, obsnum. - - this_infile = fnames.get_staged_msname( - target=this_target, project=this_project, - array_tag=this_array_tag, obsnum=this_obsnum, - product=product, ext=extra_ext_in) - - # Check for existence of original data and continuum - # subtraction. - - infile_dict[this_infile] = {} - infile_dict[this_infile]['present'] = \ - os.path.isdir(this_infile) - - # If no ms data found for the given target, then just return. - # This could happen if the target name is a mosaic target, - # and each ms data will be named by the mosaic parts. - - if len(infile_dict) == 0: - return - - infile_list = [] - for this_infile in infile_dict.keys(): - if infile_dict[this_infile]['present']: - infile_list.append(this_infile) - - # Note that there is a concatenation step - - outfile = fnames.get_vis_filename( - target=target, config=config, product=product, - ext=extra_ext_out, suffix=None) - - # Extract necessary information for flagging lines - # get target vsys and vwidth use the parent vsys, vwidth - # when part of a linear mosaic. useful when spectral chunks are defined - vsys_kms, vwidth_kms = \ - self._kh.get_system_velocity_and_velocity_width_for_target( - target, check_parent=True) - lines_to_flag = self._kh.get_lines_to_flag(product=product) - - # Extract the spectral information needed for the regrid/rebin - ranges_to_extract = self._kh.get_freq_ranges_for_cont_product(product) - target_chanwidth = self._kh.get_channel_width_for_cont_product(product) - - valid_methods = [ - 'regrid_then_rebin', 'rebin_then_regrid', - 'just_regrid', 'just_rebin'] - if method.lower().strip() not in valid_methods: - logger.error( - "Not a valid continuum extraction method - " + str(method)) - raise Exception( - "Please specify a valid continuum extraction method.") - - logger.info("") - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("Extracting continuum product:") - logger.info("... 
Extracting ranges: " + str(ranges_to_extract)) - logger.info("... Lines to flag: " + str(lines_to_flag)) - logger.info("... Target channel width: " + str(target_chanwidth)) - logger.info("... Method: " + str(method)) - logger.info("... From files: " + str(infile_list)) - logger.info("... To file: " + str(outfile)) - logger.info("&%&%&%&%&%&%&%&%&%&%&%&%&%") - logger.info("") - - if not self._dry_run and casa_enabled: - - cvr.batch_extract_continuum( - infile_list=infile_list, - outfile=outfile, - ranges_to_extract=ranges_to_extract, - target_chan_ghz=target_chanwidth, - lines_to_flag=lines_to_flag, - vsys_kms=vsys_kms, - vwidth_kms=vwidth_kms, - method=method, - exact=exact, - overwrite=overwrite, - clear_pointing=False, - require_full_cont_coverage=require_full_cont_coverage) + return () - if do_statwt: - # not sure if we need this here - pass - - return () - - def task_remove_concat( - self, - target=None, - product=None, - config=None, - extra_ext_in='', - suffixes=None): - """ - Remove any concatenated measurement sets. These are - intermediate (though time consuming) products not needed for - imaging. This procedure wipes them and saves disk space. - """ + def task_remove_concat( + self, + target=None, + product=None, + config=None, + extra_ext_in='', + suffixes=None): + """ + Remove any concatenated measurement sets. These are + intermediate (though time consuming) products not needed for + imaging. This procedure wipes them and saves disk space. 
+ """ - # Error checking + # Error checking - if target is None: - logger.error("Please specify a target.") - raise Exception("Please specify a target.") + if target is None: + logger.error("Please specify a target.") + raise Exception("Please specify a target.") - if product is None: - logger.error("Please specify a product.") - raise Exception("Please specify a product.") + if product is None: + logger.error("Please specify a product.") + raise Exception("Please specify a product.") - if config is None: - logger.error("Please specify a config.") - raise Exception("Please specify a config.") + if config is None: + logger.error("Please specify a config.") + raise Exception("Please specify a config.") - _ = self._kh.get_imaging_dir_for_target(target, changeto=True) + _ = self._kh.get_imaging_dir_for_target(target, changeto=True) - if suffixes is None: - suffixes = [''] - if isinstance(suffixes, list): - suffixes = [suffixes] + if suffixes is None: + suffixes = [''] + if isinstance(suffixes, list): + suffixes = [suffixes] - for this_suffix in suffixes: - if this_suffix == '': - this_suffix = None + for this_suffix in suffixes: + if this_suffix == '': + this_suffix = None - infile = fnames.get_vis_filename( - target=target, config=config, product=product, - ext=extra_ext_in, suffix=this_suffix) + infile = fnames.get_vis_filename( + target=target, config=config, product=product, + ext=extra_ext_in, suffix=this_suffix) - logger.info('Removing ' + infile) + logger.info('Removing ' + infile) - if not self._dry_run: - os.system('rm -rf ' + infile) + if not self._dry_run: + os.system('rm -rf ' + infile) -# endregion + # endregion diff --git a/phangsPipeline/pipelineLogger.py b/phangsPipeline/pipelineLogger.py index f684ad55..3b7891fd 100644 --- a/phangsPipeline/pipelineLogger.py +++ b/phangsPipeline/pipelineLogger.py @@ -1,13 +1,13 @@ - import logging, os -DefaultLevel = 'DEBUG' +# Ensure CASA is installed +from .check_imports import is_casa_installed -try: +casa_enabled = 
is_casa_installed() +if is_casa_installed(): from . import casaStuff - HasCasaLog = True -except: - HasCasaLog = False + +DefaultLevel = 'DEBUG' class PipelineLogger(logging.getLoggerClass()): @@ -34,10 +34,10 @@ def __del__(self): #print('PipelineLogger.__del__') if self.file_handler is not None: self.file_handler.close() - + def hasCasaLog(self): - global HasCasaLog - return HasCasaLog + casa_enabled = is_casa_installed() + return casa_enabled #return ('casalog' in globals()) def setCasaOrigin(self): diff --git a/phangsPipeline/pipelineVersion.py b/phangsPipeline/pipelineVersion.py deleted file mode 100644 index ba3f95fa..00000000 --- a/phangsPipeline/pipelineVersion.py +++ /dev/null @@ -1,14 +0,0 @@ -from __future__ import unicode_literals - -# Update this as versions increase -tableversion = '1.6' - -try: - from .version import version -except ImportError: - # NOTE: this is here to match with previous versions and when - # the version.py file has not been generated on package installation - - # For now this needs to be updated manually. - # Setting to last tagged version on 2022/02/21. 
- version = "v3.0" diff --git a/phangsPipeline/scConvolution.py b/phangsPipeline/scConvolution.py index 88903efe..a55c4316 100644 --- a/phangsPipeline/scConvolution.py +++ b/phangsPipeline/scConvolution.py @@ -1,10 +1,9 @@ import logging -import numpy as np import astropy.units as u -from astropy.io import fits +import numpy as np from astropy.convolution import Box1DKernel -from astropy.convolution import convolve, convolve_fft +from astropy.io import fits from radio_beam import Beam from spectral_cube import SpectralCube, LazyMask, Projection diff --git a/phangsPipeline/scDerivativeRoutines.py b/phangsPipeline/scDerivativeRoutines.py index b41c1169..a5052258 100644 --- a/phangsPipeline/scDerivativeRoutines.py +++ b/phangsPipeline/scDerivativeRoutines.py @@ -1,12 +1,14 @@ import inspect import logging -import numpy as np import astropy.units as u +import numpy as np +from astropy.convolution import Box1DKernel from astropy.io import fits -from spectral_cube import SpectralCube, Projection +from scipy.interpolate import interp1d +from spectral_cube import Projection -from .pipelineVersion import tableversion, version +from . 
import __version__ logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) @@ -81,9 +83,7 @@ def update_metadata(projection, cube, error=False): except KeyError: pass - hdr['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + version - if tableversion: - hdr['COMMENT'] = 'Galaxy properties from PHANGS sample table version ' + tableversion + hdr['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + __version__ hdr['COMMENT'] = (btype + ' generated by collapsing cube over ' + collapse_name + ' axis.') @@ -932,7 +932,6 @@ def write_tmax(cubein, if window is not None: window = u.Quantity(window) - from astropy.convolution import Box1DKernel dv = channel_width(new_cube) nChan = (window / dv).to(u.dimensionless_unscaled).value if nChan > 1: @@ -1035,7 +1034,6 @@ def write_vmax(cubein, Return products calculated in the map """ if type(window) is u.Quantity: - from astropy.convolution import Box1DKernel dv = channel_width(cube) nChan = (window / dv).to(u.dimensionless_unscaled).value if nChan > 1: @@ -1147,10 +1145,7 @@ def write_vquad(cubein, Return products calculated in the map """ - from scipy.interpolate import interp1d - if type(window) is u.Quantity: - from astropy.convolution import Box1DKernel dv = channel_width(cubein) nChan = (window / dv).to(u.dimensionless_unscaled).value if nChan > 1: diff --git a/phangsPipeline/scMaskingRoutines.py b/phangsPipeline/scMaskingRoutines.py index f95cb475..593461a8 100644 --- a/phangsPipeline/scMaskingRoutines.py +++ b/phangsPipeline/scMaskingRoutines.py @@ -1,22 +1,16 @@ import logging from functools import reduce +import astropy.units as u +import astropy.wcs as wcs import numpy as np -import scipy.ndimage.morphology as morph import scipy.ndimage as nd -import scipy.stats as ss -from scipy.signal import savgol_coeffs -import astropy.wcs as wcs -import astropy.units as u -from astropy.stats import mad_std -from astropy.convolution import convolve, Gaussian2DKernel +import scipy.ndimage.morphology as 
morph from astropy.io import fits -from spectral_cube import Projection,SpectralCube - -from .pipelineVersion import tableversion, version +from spectral_cube import Projection, SpectralCube -from .scNoiseRoutines import mad_zero_centered from .scDerivativeRoutines import convert_and_reproject +from . import __version__ np.seterr(divide='ignore', invalid='ignore') @@ -1015,9 +1009,7 @@ def recipe_phangs_broad_mask( header = mask.header header['DATAMAX'] = 1 header['DATAMIN'] = 0 - header['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + version - if tableversion: - header['COMMENT'] = 'Galaxy properties from PHANGS sample table version ' + tableversion + header['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + __version__ hdu = fits.PrimaryHDU(np.array(mask.filled_data[:], dtype=np.uint8), header=header) hdu.writeto(outfile, overwrite=overwrite) diff --git a/phangsPipeline/scMoments.py b/phangsPipeline/scMoments.py index 5956e5c4..fceaecd2 100644 --- a/phangsPipeline/scMoments.py +++ b/phangsPipeline/scMoments.py @@ -2,8 +2,8 @@ import logging import warnings -import numpy as np import astropy.units as u +import numpy as np from spectral_cube import SpectralCube from . import scDerivativeRoutines as scdr diff --git a/phangsPipeline/scNoiseRoutines.py b/phangsPipeline/scNoiseRoutines.py index 774b5721..ba6e9f29 100644 --- a/phangsPipeline/scNoiseRoutines.py +++ b/phangsPipeline/scNoiseRoutines.py @@ -2,17 +2,12 @@ import numpy as np import scipy.ndimage as nd -import scipy.ndimage.morphology as morph import scipy.stats as ss -from scipy.signal import savgol_coeffs -import astropy.wcs as wcs -import astropy.units as u from astropy.convolution import convolve_fft, Gaussian2DKernel, convolve -from astropy.io import fits -from astropy.stats import mad_std +from scipy.signal import savgol_coeffs from spectral_cube import SpectralCube -from .pipelineVersion import tableversion, version +from . 
import __version__ np.seterr(divide='ignore', invalid='ignore') @@ -196,6 +191,10 @@ def noise_cube(data, step = np.min([step, pixdim // 20]) halfbox = int(box // 2) + # If step ends up being 0, then set back to 1 + if step == 0: + step = 1 + if substride is None: substride = 1 @@ -535,9 +534,7 @@ def recipe_phangs_noise( header['DATAMIN'] = datamin if np.isfinite(datamax): header['DATAMAX'] = datamax - header['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + version - if tableversion: - header['COMMENT'] = 'Galaxy properties from PHANGS sample table version ' + tableversion + header['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + __version__ rms = SpectralCube(rms, wcs=cube.wcs, header=header, meta={'BUNIT':cube.header['BUNIT']}) diff --git a/phangsPipeline/scStackingRoutines.py b/phangsPipeline/scStackingRoutines.py index 98d310e4..97663bd9 100644 --- a/phangsPipeline/scStackingRoutines.py +++ b/phangsPipeline/scStackingRoutines.py @@ -1,14 +1,12 @@ -import numpy as np -import scipy.ndimage as nd - import astropy.units as u import astropy.wcs as wcs +import numpy as np from astropy.io import fits - from spectral_cube import SpectralCube, Projection from .scDerivativeRoutines import convert_and_reproject + def channelShiftVec(x, ChanShift): """Shift an array of spectra by some number of channels using the FFT. diff --git a/phangsPipeline/statsHandler.py b/phangsPipeline/statsHandler.py index ee9b8bdc..bb6d0a88 100755 --- a/phangsPipeline/statsHandler.py +++ b/phangsPipeline/statsHandler.py @@ -9,26 +9,17 @@ but it's not essential or generally integrated. 
""" -from __future__ import print_function -import os, sys, re, shutil import json -import glob import logging -import numpy as np -import scipy.ndimage.morphology as morph -import scipy.ndimage as nd +import os -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - -# Check casa environment by importing CASA-only packages -from .casa_check import is_casa_installed -casa_enabled = is_casa_installed() - -from . import handlerTemplate +from . import casaImagingRoutines as cir from . import handlerKeys +from . import handlerTemplate from . import utilsFilenames -from . import casaImagingRoutines as cir + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) class StatsHandler(handlerTemplate.HandlerTemplate): @@ -215,10 +206,7 @@ def task_residual_regression( master_key = 'keys/master_key.txt' #master_key = '/Users/dzliu/Work/AlmaPhangs/Works/20200630_PHANGS_ALMA_clean_records/test_phangs_working_dir/keys/master_key.txt' if not os.path.isfile(master_key): - if sys.version_info.major <= 2: - master_key = raw_input("Please input your master key file path: ") - else: - master_key = input("Please input your master key file path: ") + master_key = input("Please input your master key file path: ") if master_key.find("'") >= 0: master_key = master_key.replace("'", "") diff --git a/phangsPipeline/taskSDIntImaging.py b/phangsPipeline/taskSDIntImaging.py index 5d0b616d..bd243f42 100644 --- a/phangsPipeline/taskSDIntImaging.py +++ b/phangsPipeline/taskSDIntImaging.py @@ -9,20 +9,15 @@ import copy import logging -import os -import shutil - -from . import casaMaskingRoutines as cmr -from . 
import casaStuff - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) from casatasks.private.imagerhelpers.imager_base import PySynthesisImager from casatasks.private.imagerhelpers.input_parameters import ImagerParameters -#from casatasks.private.cleanhelper import write_tclean_history, get_func_params +# from casatasks.private.cleanhelper import write_tclean_history, get_func_params from casatasks.private.sdint_helper import * +from . import casaMaskingRoutines as cmr +from . import casaStuff + # Pull MPI in, if available try: @@ -35,6 +30,9 @@ sdintlib = SDINT_helper() synu = casaStuff.synthesisutils() +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + # Setup functions def setup_imager_obj(param_list=None): diff --git a/phangsPipeline/tests/__init__.py b/phangsPipeline/tests/__init__.py deleted file mode 100644 index 838b4573..00000000 --- a/phangsPipeline/tests/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Licensed under a 3-clause BSD style license - see LICENSE.rst -""" -This module contains package tests. -""" diff --git a/phangsPipeline/utilsFieldSelection.py b/phangsPipeline/utilsFieldSelection.py index 45bd6eda..e4b3ee1a 100644 --- a/phangsPipeline/utilsFieldSelection.py +++ b/phangsPipeline/utilsFieldSelection.py @@ -1,14 +1,15 @@ """ utilsFieldSelection.py """ -import os, sys, re, shutil +import os +import re +import shutil + +import analysisUtils as aU import numpy as np -# CASA stuff from . 
import casaStuff -#sys.path.insert(0, '/software/casa/analysis_scripts') -import analysisUtils as aU tb = aU.createCasaTool(casaStuff.tbtool) split = casaStuff.split @@ -224,10 +225,5 @@ def process_ms_list( if verbose: print('Splitting {!r} field={!r} -> {!r} ({}/{})'.format(vis, valid_fields, outputvis, i+1, len(ms_list))) split(vis, outputvis, field=valid_fields, datacolumn=datacolumn) + if verbose: print('Processed {!r} -> {!r} ({}/{})'.format(vis, outputvis, i+1, len(ms_list))) - - - - - - diff --git a/phangsPipeline/utilsFilenames.py b/phangsPipeline/utilsFilenames.py index 70a55a41..2a5ef1b5 100644 --- a/phangsPipeline/utilsFilenames.py +++ b/phangsPipeline/utilsFilenames.py @@ -2,9 +2,8 @@ Utilities for defining file names. """ -import os, ast - import logging + logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) diff --git a/phangsPipeline/utilsImages.py b/phangsPipeline/utilsImages.py index 12c8cbe8..d710b789 100644 --- a/phangsPipeline/utilsImages.py +++ b/phangsPipeline/utilsImages.py @@ -1,17 +1,12 @@ # Routines relates to python (not CASA) image handling. -import numpy as np -import os -import re -import warnings -import math - import astropy.units as u -from astropy.units import Quantity - +import numpy as np +from astropy.coordinates import SkyCoord from astropy.io import fits +from astropy.units import Quantity from astropy.wcs import WCS -from astropy.coordinates import SkyCoord + # ------------------------------------------------------------------------ # Make new headers diff --git a/phangsPipeline/utilsKeyReaders.py b/phangsPipeline/utilsKeyReaders.py index 2d1e65f5..5955d456 100644 --- a/phangsPipeline/utilsKeyReaders.py +++ b/phangsPipeline/utilsKeyReaders.py @@ -2,12 +2,10 @@ Utilities for reading our pipeline-specific keys. """ -import os, re, ast - -# There's room to further reduce code and redundancy here. For now, -# this may not be worth the time. 
- +import ast import logging +import os +import re logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) diff --git a/phangsPipeline/utilsLines.py b/phangsPipeline/utilsLines.py index e6e5f35d..6a6e0ad6 100644 --- a/phangsPipeline/utilsLines.py +++ b/phangsPipeline/utilsLines.py @@ -1,7 +1,7 @@ # This is the line list. -import re import logging +import re import numpy as np diff --git a/phangsPipeline/utilsLists.py b/phangsPipeline/utilsLists.py index 4179fb32..34c93718 100644 --- a/phangsPipeline/utilsLists.py +++ b/phangsPipeline/utilsLists.py @@ -2,7 +2,6 @@ General purpose utilities. """ -import numpy as np def select_from_list( master_list, diff --git a/phangsPipeline/utilsSingleDish.py b/phangsPipeline/utilsSingleDish.py index d138f20b..171f6b2e 100644 --- a/phangsPipeline/utilsSingleDish.py +++ b/phangsPipeline/utilsSingleDish.py @@ -1,16 +1,13 @@ import os -import numpy as np - -from .casaStuff import casa_version, tbtool, msmdtool, metool, qatool -# Analysis utilities import analysisUtils as au +import matplotlib +import numpy as np +import pylab as pb from analysisUtils import mjdSecondsListToDateTime, mjdsecToUT -# Import pylab for plotting -import pylab as pb -import matplotlib +from .casaStuff import casa_version, tbtool, msmdtool, metool, qatool casaVersion = "{0}.{1}.{2}".format(*casa_version) diff --git a/phangsPipeline/utilsTestImagingPlots.py b/phangsPipeline/utilsTestImagingPlots.py index 17769d14..658abb55 100644 --- a/phangsPipeline/utilsTestImagingPlots.py +++ b/phangsPipeline/utilsTestImagingPlots.py @@ -58,7 +58,7 @@ except ImportError: HAS_ASTROPY = False -from .casa_check import is_casa_installed +from .check_imports import is_casa_installed casa_enabled = is_casa_installed() if casa_enabled: diff --git a/phangsPipeline/utilsTheoreticalNoise.py b/phangsPipeline/utilsTheoreticalNoise.py index 9b73cfa7..68d9c9ec 100644 --- a/phangsPipeline/utilsTheoreticalNoise.py +++ b/phangsPipeline/utilsTheoreticalNoise.py @@ -2,26 +2,10 @@ 
import logging -import numpy as np - -import scipy.ndimage as nd -import scipy.ndimage.morphology as morph -import scipy.stats as ss -from scipy.signal import savgol_coeffs - -import astropy.wcs as wcs -import astropy.units as u -from astropy.units import Quantity - -from astropy.convolution import convolve, Gaussian2DKernel -from astropy.io import fits -from astropy.stats import mad_std from astropy.table import Table from .utilsImages import * -from .pipelineVersion import tableversion, version - np.seterr(divide='ignore', invalid='ignore') logger = logging.getLogger(__name__) diff --git a/pyproject.toml b/pyproject.toml index 2966dc43..d579ac36 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,77 @@ -[build-system] +[project] + +name = "phangsPipeline" +version = "4.0.0" +description = "PHANGS-ALMA Pipeline" +readme = "README.md" +requires-python = ">=3.12, <3.13" +license = {file = "LICENSE"} + +authors = [ + {name = "PHANGS Team"}, +] + +# Alphabetically +maintainers = [ + {name = "Eric Koch", email = "koch.eric.w@gmail.com"}, + {name = "Adam Leroy", email = "leroy.42@osu.edu"}, + {name = "Daizhong Liu", email = "astro.dzliu@gmail.com"}, + {name = "Erik Rosolowsky", email = "rosolowsky@ualberta.ca"}, + {name = "Jiayi Sun", email = "astrojysun@outlook.com"}, + {name = "Thomas Williams", email = "thomas.g.williams@manchester.ac.uk"}, +] + +classifiers = [ + "Development Status :: 5 - Production/Stable", + + # License + "License :: OSI Approved :: MIT License", -requires = ["setuptools", - "setuptools_scm", - "wheel", - "oldest-supported-numpy"] + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3 :: Only", +] + +dependencies = [ + "astropy >= 7.2.0", + "astroquery >= 0.4.11", + "numpy >= 2.3.5", + "protobuf == 3.20", + "pytz >= 2025.2", + "scipy >= 1.16.3", + "spectral-cube >= 0.6.7", +] + +# Optional packages for CASA 
+[project.optional-dependencies] +casa = [ + "casadata >= 2025.9.22", + "casaplotms >= 2.7.4", + "casarecipes >= 0.4.0", + "casashell >= 6.7.3.21", + "casatasks >= 6.7.3.21", + "casatools >= 6.7.3.21", + "casaviewer >= 2.4.4 ; platform_system != 'Darwin'", + "pipeline@git+https://open-bitbucket.nrao.edu/scm/pipe/pipeline.git", +] + +[project.urls] +"Homepage" = "https://github.com/phangsTeam/phangs_imaging_scripts" +"Bug Reports" = "https://github.com/phangsTeam/phangs_imaging_scripts/issues" +"Source" = "https://github.com/phangsTeam/phangs_imaging_scripts" + +[tool.setuptools.packages.find] +exclude = [ + "phangs-alma_keys/*", + "scripts/*", +] + +[build-system] +requires = [ + "setuptools >= 80.9.0", + "setuptools_scm >= 9.2.2", + "wheel >= 0.45.1", +] -build-backend = 'setuptools.build_meta' +build-backend = "setuptools.build_meta" diff --git a/run_casa_pipeline_for_tp.py b/run_casa_pipeline_for_tp.py deleted file mode 100644 index 889b3aee..00000000 --- a/run_casa_pipeline_for_tp.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -To run the Total Power pipeline, make sure you have set up the key files like the following example: - -In "keys/master_key.txt": - - singledish_key singledish_key.txt - -In "keys/singledish_key.txt": - - ngc3351 co10 ALMA_TP.NGC3351.CO10.image.VLSRK.fits - -In "keys/ms_file_key.txt" (point to the TP observation's member uid directory): - - ngc3351 2022.1.00360 all tp 1 2022.1.00360.S/science_goal.uid___A001_X2d20_X2d03/group.uid___A001_X2d20_X2d04/member.uid___A001_X2d20_X2d09 - -In "keys/config_definitions.txt" (no white space inside '{}'): - - singledish_config tp {'bl_order':1,'chan_dv_kms':2.5,'doplots':True} - -Then, we also need to install the 'astropy' package, because the TP pipeline needs the 'pyfits' package, but casa5?6? does not have that anymore. 
-If your casa's path is like /software/casa/casa-6.6.4-34-py3.8.el7, then run this to install 'astropy' into the directory './scripts/local/': - - /software/casa/casa-6.6.4-34-py3.8.el7/bin/python3 -m pip install astropy==5.2.2 --prefix=./scripts/local/ --ignore-installed - /software/casa/casa-release-5.7.2-4.el7/bin/python -m pip install astropy==5.2.2 --prefix=./scripts/local/ --ignore-installed - -In this script below, you can see that we will add this path into sys.path so that the pipeline can import the 'astropy' package. - -To run the TP pipeline, we will also need the CASA analysisUtil script, which can be obtained from {}. -We put this analysisUtil script under the directory './scripts/analysis_scripts/'. -We also put the 'phangsPipeline' under the directory './scripts/phangs_imaging_scripts/'. -If you have different directories, please change them in this code below. - -""" -import os, sys -sys.path.insert(0, os.getcwd()+'/script/analysis_scripts') -sys.path.insert(0, os.getcwd()+'/script/phangs_imaging_scripts/phangsPipeline') -sys.path.insert(0, os.getcwd()+'/script/phangs_imaging_scripts') -#sys.path.insert(0, os.getcwd()+'/script/local/lib/python3.8/site-packages') -try: - import pyfits -except: - sys.path.insert(0, os.getcwd()+'/script/local/lib/python{}.{}/site-packages'.format(sys.version_info.major, sys.version_info.minor)) - import astropy.io.fits as pyfits -"""Run these in advance: - mkdir script/local - /software/casa/casa-6.6.4-34-py3.8.el7/bin/python3 -m pip install astropy==5.2.2 --prefix=./script/local/ --ignore-installed - /software/casa/casa-release-5.7.2-4.el7/bin/python -m pip install astropy==5.2.2 --prefix=./scripts/local/ --ignore-installed -""" -#if 'importlib' in sys.modules: -# del sys.modules['importlib'] -#if 'importlib.metadata' in sys.modules: -# del sys.modules['importlib.metadata'] -#import packaging as importlib -#from importlib import metadata -#print('importlib.__path__', importlib.__path__) -#print('metadata.__path__', 
metadata.__path__) -#import astropy - -key_file = os.getcwd()+'/keys/master_key.txt' - -from phangsPipeline import phangsLogger as pl -pl.setup_logger(level='DEBUG', logfile=None) - -from phangsPipeline import handlerKeys as kh -from phangsPipeline import handlerSingleDish as sdh - -this_kh = kh.KeyHandler(master_key=key_file) -this_sdh = sdh.SingleDishHandler(key_handler=this_kh) - -this_sdh.loop_singledish() - diff --git a/run_casa_pipeline_phangs-alma.py b/run_casa_pipeline_phangs-alma.py deleted file mode 100644 index b10eb479..00000000 --- a/run_casa_pipeline_phangs-alma.py +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env python -# -# Run this script INSIDE CASA or with CASA available. -# - -# This is the PHANGS ALMA staging and imaging script. - -# This script loads the project data, constructs the PHANGS pipeline -# handlers, and then executes each step: staging, imaging, -# postprocessing. The user has control over which targets, spectral -# products, and steps run. - -# This is a documented version that we provide with the main pipeline -# repository as an example tou users. You should be able to modify -# this script to get a good start on your own wrapper to the pipeline. - - -############################################################################## -# Load routines, initialize handlers -############################################################################## - -import os, sys -import importlib - -# Pipeline directory. Set this to the location on your system - -pipedir = '/data/tycho/0/leroy.42/reduction/alma/phangs_imaging_scripts/' - -# Location of the master key. Set this to the master key that points -# to all of the keys for your project. - -key_file = '/data/tycho/0/leroy.42/reduction/alma/phangs_imaging_scripts/phangs-alma_keys/master_key.txt' - -# Change directory to the pipeline directory. 
- -os.chdir(pipedir) - -# Make sure we are inside CASA (you will need to modify this to use -# the pipeline via a command line call) - -sys.path.append(os.getcwd()) -casa_enabled = (sys.argv[0].endswith('start_casa.py')) -if not casa_enabled: - print('Please run this script inside CASA!') - sys.exit() - -# Import the logger and initialize the logging. You can change the -# level of message that you want to see by changing "level" here or -# save to a logfile with the keyword. - -from phangsPipeline import phangsLogger as pl -pl.setup_logger(level='DEBUG', logfile=None) - -# Imports - -# sys.path.insert(1, ) -from phangsPipeline import handlerKeys as kh -from phangsPipeline import handlerVis as uvh -from phangsPipeline import handlerImaging as imh -from phangsPipeline import handlerPostprocess as pph - -# Initialize the various handler objects. First initialize the -# KeyHandler, which reads the master key and the files linked in the -# master key. Then feed this keyHandler, which has all the project -# data, into the other handlers (VisHandler, ImagingHandler, -# PostProcessHandler), which run the actual pipeline using the project -# definitions from the KeyHandler. - -this_kh = kh.KeyHandler(master_key=key_file) -this_uvh = uvh.VisHandler(key_handler=this_kh) -this_imh = imh.ImagingHandler(key_handler=this_kh) -this_pph = pph.PostProcessHandler(key_handler=this_kh) - -# Make any missing directories - -this_kh.make_missing_directories(imaging=True, derived=True, postprocess=True, release=True) - -############################################################################## -# Set up what we do this run -############################################################################## - - -# Set the configs (arrays), spectral products (lines), and targets to -# consider. - -# Set the targets. Called with only () it will use all targets. The -# only= , just= , start= , stop= criteria allow one to build a smaller -# list. - -# Set the configs. 
Set both interf_configs and feather_configs just to -# determine which cubes will be processed. The only effect in this -# derive product calculation is to determine which cubes get fed into -# the calculation. - -# Set the line products. Similarly, this just determines which cubes -# are fed in. Right now there's no derived product pipeline focused on -# continuum maps. - -# ASIDE: In PHANGS-ALMA we ran a cheap parallelization by running -# several scripts with different start and stop values in parallel. If -# you are running a big batch of jobs you might consider scripting -# something similar. - -# Note here that we need to set the targets, configs, and lines for -# *all three* relevant handlers - the VisHandler (uvh), ImagingHandler -# (imh), and PostprocessHandler (pph). The settings below will stage -# combined 12m+7m data sets (including staging C18O and continuum), -# image the CO 2-1 line from these, and then postprocess the CO 2-1 -# cubes. - -this_uvh.set_targets() -this_uvh.set_interf_configs(only=['12m+7m']) -this_uvh.set_line_products() -this_uvh.set_no_cont_products(False) - -# e.g., could be to be more selective: -# this_uvh.set_targets(only=['ngc3489','ngc3599','ngc4476']) -# this_uvh.set_interf_configs(only=['12m+7m']) -# this_uvh.set_line_products(only=['co21']) - -this_imh.set_targets() -this_imh.set_interf_configs(only=['12m+7m']) -this_imh.set_no_cont_products(True) -this_imh.set_line_products(only=['co21']) - -this_pph.set_targets() -this_pph.set_interf_configs(only=['12m+7m']) -this_pph.set_feather_configs(only=['12m+7m+tp']) - -# Use boolean flags to set the steps to be performed when the pipeline -# is called. See descriptions below (but only edit here). 
- -do_staging = True -do_imaging = True -do_postprocess = True -do_stats = True - -############################################################################## -# Run staging -############################################################################## - -# "Stage" the visibility data. This involves copying the original -# calibrated measurement set, continuum subtracting (if requested), -# extraction of requested lines and continuum data, regridding and -# concatenation into a single measurement set. The overwrite=True flag -# is needed to ensure that previous runs can be overwritten. - -if do_staging: - this_uvh.loop_stage_uvdata(do_copy=True, do_contsub=True, - do_extract_line=False, do_extract_cont=False, - do_remove_staging=False, overwrite=True) - - this_uvh.loop_stage_uvdata(do_copy=False, do_contsub=False, - do_extract_line=True, do_extract_cont=False, - do_remove_staging=False, overwrite=True) - - this_uvh.loop_stage_uvdata(do_copy=False, do_contsub=False, - do_extract_line=False, do_extract_cont=True, - do_remove_staging=False, overwrite=True) - - this_uvh.loop_stage_uvdata(do_copy=False, do_contsub=False, - do_extract_line=False, do_extract_cont=False, - do_remove_staging=True, overwrite=True) - -############################################################################## -# Step through imaging -############################################################################## - -# Image the concatenated, regridded visibility data. The full loop -# involves applying any user-supplied clean mask, multiscale imaging, -# mask generation for the single scale clean, and single scale -# clean. The individual parts can be turned on or off with flags to -# the imaging loop call but this call does everything. 
- -if do_imaging: - this_imh.loop_imaging(do_all=True) - -############################################################################## -# Step through postprocessing -############################################################################## - -# Postprocess the data in CASA after imaging. This involves primary -# beam correction, linear mosaicking, feathering, conversion to Kelvin -# units, and some downsampling to save space. - -if do_postprocess: - this_pph.loop_postprocess(do_prep=True, do_feather=True, - do_mosaic=True, do_cleanup=True) diff --git a/run_derived_pipeline_phangs-alma.py b/run_derived_pipeline_phangs-alma.py deleted file mode 100644 index b4e4b268..00000000 --- a/run_derived_pipeline_phangs-alma.py +++ /dev/null @@ -1,204 +0,0 @@ -#!/usr/bin/env python -# -# Run this script OUTSIDE CASA in an environment that has astropy, -# spectral-cube, scipy, and numpy. -# - -############################################################################## -# Load routines, change directory, initialize handlers -############################################################################## - -import os, sys -import importlib - -# Pipeline directory. Set this to the location on your system - -pipedir = '/home/leroy.42/python/phangs_imaging_scripts/' - -# Location of the master key. Set this to the master key that points -# to all of the keys for your project. - -key_file = '/home/leroy.42/python/phangs_imaging_scripts/phangs-alma_keys/master_key.txt' - -# Change directory to the pipeline directory. - -os.chdir(pipedir) - -# Import the logger and initialize the logging. You can change the -# level of message that you want to see by changing "level" here or -# save to a logfile with the keyword. 
- -from phangsPipeline import phangsLogger as pl - -# reloads are useful for debugging but can be commented out -importlib.reload(pl) -pl.setup_logger(level='DEBUG', logfile=None) - -# Imports - -# sys.path.insert(1, ) -from phangsPipeline import handlerKeys as kh -from phangsPipeline import handlerDerived as der - -# reloads are useful for debugging but can be commented out -importlib.reload(kh) -importlib.reload(der) - -# Initialize the various handler objects. First initialize the -# KeyHandler, which reads the master key and the files linked in the -# master key. Then feed this keyHandler, which has all the project -# data, into the other handlers (here DerivedHandler), which run the -# actual pipeline using the project definitions from the KeyHandler. - -this_kh = kh.KeyHandler(master_key=key_file) -this_der = der.DerivedHandler(key_handler=this_kh) - -# Make missing directories - -this_kh.make_missing_directories(imaging=True, derived=True, postprocess=True, release=True) - -############################################################################## -# Set up what we do this run -############################################################################## - -# Set the configs (arrays), spectral products (lines), and targets to -# consider. - -# Set the targets. Called with only () it will use all targets. The -# only= , just= , start= , stop= criteria allow one to build a smaller -# list. - -# Set the configs. Set both interf_configs and feather_configs just to -# determine which cubes will be processed. The only effect in this -# derive product calculation is to determin which cubes get fed into -# the calculation. - -# Set the line products. Similarly, this just determines which cubes -# are fed in. Right now there's no derived product pipeline focused on -# continuum maps. - -# ASIDE: In PHANGS-ALMA we ran a cheap parallelization by running -# several scripts with different start and stop values in parallel. 
If -# you are running a big batch of jobs you might consider scripting -# something similar. - -this_der.set_targets(only=['ngc3489']) -# this_der.set_targets(only=['ngc1809']) - -this_der.set_interf_configs(skip=['7m','12m','12m+7m']) -this_der.set_feather_configs(only=['7m+tp']) - -this_der.set_line_products(only=['co21']) -this_der.set_no_cont_products(True) - -# Use boolean flags to set the steps to be performed when the pipeline -# is called. See descriptions below (but only edit here). - -do_convolve = False -do_noise = False -do_strictmask = False -do_broadmask = False -do_moments = False -do_secondary = False - -# new DR5 routines (shuffling and flat maps) -do_vfield = False # creates a velocity field for shuffling -do_shuffling = True # runs independently from other tasks -do_flatmask = True # requires noise and broad/strict masks to join -do_flatmaps = True # requires flat masks - -############################################################################## -# Step through derived product creation -############################################################################## - -# Run the calculations requested by the user. The steps are annotated -# here, but in general, do not change anything below this line. Just -# use the flags above to steer the calculation. - -# Convolve the post-processed data products to the various angular and -# physical resolutions specified in the keys. - -if do_convolve: - this_der.loop_derive_products(do_convolve=True, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=False) - -# Estimate the noise from the signal-free regions of the data to -# produce a three-dimensional noise model for each cube. - -if do_noise: - this_der.loop_derive_products(do_convolve=False, do_noise=True, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=False) - -# Construct "strict masks" for each cube at each resolution. 
- -if do_strictmask: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=True, do_broadmask=False, - do_moments=False, do_secondary=False) - -# Combine the strict masks across all linked resolutions to form -# "broad masks" that have high completeness. - -if do_broadmask: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=True, - do_moments=False, do_secondary=False) - -# Apply the masks and use the cubes and noise models to produce moment -# maps with associated uncertainty. - -if do_moments: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=True, do_secondary=False) - -# Run a second round of moment calculations. This enables claculation -# of moments that depend on other, earlier moment map calculations - -if do_secondary: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=True) - -# Create velocity field. This creates a combined velocity field from -# a list of derived mom1 maps. - -if do_vfield: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=False, - do_vfield=True, do_shuffling=False, - do_flatmask=False, do_flatmaps=False) - -# Create shuffled cubes. This shuffles the processed and derived cubes -# by a velocity offset defined by an input velocity field. - -if do_shuffling: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=False, - do_vfield=False, do_shuffling=True, - do_flatmask=False, do_flatmaps=False) - -# Construct flat masks. This combines the existing signal masks -# with a velocity slab based on a input velocity field and velocity -# window. 
- -if do_flatmask: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=False, - do_vfield=False, do_shuffling=False, - do_flatmask=True, do_flatmaps=False) - -# Produce flat moment-0 maps. This uses the flat masks to create -# moment-0 maps. - -if do_flatmaps: - this_der.loop_derive_products(do_convolve=False, do_noise=False, - do_strictmask=False, do_broadmask=False, - do_moments=False, do_secondary=False, - do_vfield=False, do_shuffling=False, - do_flatmask=False, do_flatmaps=True) diff --git a/run_pipeline_phangs-alma.py b/run_pipeline_phangs-alma.py new file mode 100644 index 00000000..99e01e98 --- /dev/null +++ b/run_pipeline_phangs-alma.py @@ -0,0 +1,292 @@ +import sys + +from casatasks import casalog + +# Add analysisUtils to the path. Make sure to set this to where you have analysisUtils downloaded! +au_path = "path/to/analysis_scripts" +sys.path.append(au_path) + +import phangsPipeline as ppl +from phangsPipeline.check_imports import is_casa_installed + +casa_enabled = is_casa_installed() + +# YOU SHOULD EDIT THINGS BELOW THIS # + +# Path to your master key +master_key_file = "path/to/master_key.txt" + +# Steps to run +do_singledish = False +do_staging = True +do_imaging = True +do_postprocess = True +do_derived = True +do_release = False + +# Targets to process +targets = [ + "some_exciting_galaxy", +] + +line_products = [ + "a_thrilling_line", +] +interf_configs = [ + "7m", + "12m", +] +feather_configs = [ + "7m+tp", + '12m+7m+tp', +] + +no_cont = True + +imaging_method = "tclean" + +# Switches for derived products +do_convolve = True +do_noise = True +do_strictmask = True +do_broadmask = True +do_moments = True +do_secondary = True + +# new DR5 routines (shuffling and flat maps) +do_vfield = False # creates a velocity field for shuffling +do_shuffling = True # runs independently from other tasks +do_flatmask = True # requires noise and broad/strict masks to join 
+do_flatmaps = True # requires flat masks + +# You should not need to edit below here + +# Setup logger +ppl.setup_logger(level="DEBUG", logfile=None) +casalog.filter("INFO") +casalog.showconsole(True) + +# Initialize the KeyHandler, which reads the master key and the files linked in the +# master key. +key_handler = ppl.KeyHandler(master_key=master_key_file) + +# Initialise other handlers +sd_handler = None +uv_handler = None +im_handler = None +pp_handler = None +derived_handler = None +release_handler = None + +if casa_enabled: + if do_singledish: + sd_handler = ppl.SingleDishHandler(key_handler=key_handler) + sd_handler.set_targets(only=targets) + sd_handler.set_line_products(only=line_products) + sd_handler.set_no_cont_products(no_cont) + if do_staging: + uv_handler = ppl.VisHandler(key_handler=key_handler) + uv_handler.set_targets(only=targets) + uv_handler.set_interf_configs(only=interf_configs) + uv_handler.set_line_products(only=line_products) + uv_handler.set_no_cont_products(no_cont) + if do_imaging: + im_handler = ppl.ImagingHandler(key_handler=key_handler) + im_handler.set_targets(only=targets) + im_handler.set_interf_configs(only=interf_configs) + im_handler.set_line_products(only=line_products) + im_handler.set_no_cont_products(no_cont) + if do_postprocess: + pp_handler = ppl.PostProcessHandler(key_handler=key_handler) + pp_handler.set_targets(only=targets) + pp_handler.set_interf_configs(only=interf_configs) + pp_handler.set_line_products(only=line_products) + pp_handler.set_feather_configs(only=feather_configs) + pp_handler.set_no_cont_products(no_cont) +if do_derived: + derived_handler = ppl.DerivedHandler(key_handler=key_handler) + derived_handler.set_targets(only=targets) + derived_handler.set_interf_configs(only=interf_configs) + derived_handler.set_feather_configs(only=feather_configs) + derived_handler.set_line_products(only=line_products) + derived_handler.set_no_cont_products(no_cont) +if do_release: + release_handler = 
ppl.ReleaseHandler(key_handler=key_handler) + release_handler.set_targets(only=targets) + release_handler.set_interf_configs(only=interf_configs) + release_handler.set_feather_configs(only=feather_configs) + release_handler.set_line_products(only=line_products) + release_handler.set_no_cont_products(no_cont) + +# Run things +key_handler.make_missing_directories( + imaging=do_staging, + postprocess=do_postprocess, + derived=do_derived, + release=do_release, +) + +if casa_enabled: + ############################################################################## + # Run singledish pipeline + ############################################################################## + # Run TP data through the pipeline, from calibration to imaging. + + if do_singledish: + sd_handler.loop_singledish(do_all=True) + + ############################################################################## + # Run staging + ############################################################################## + + # "Stage" the visibility data. This involves copying the original + # calibrated measurement set, continuum subtracting (if requested), + # extraction of requested lines and continuum data, regridding and + # concatenation into a single measurement set. The overwrite=True flag + # is needed to ensure that previous runs can be overwritten. 
+ + if do_staging: + uv_handler.loop_stage_uvdata( + do_copy=True, + do_contsub=True, + do_extract_line=False, + do_extract_cont=False, + require_full_line_coverage=True, + do_remove_staging=False, + overwrite=True, + ) + + uv_handler.loop_stage_uvdata( + do_copy=False, + do_contsub=False, + do_extract_line=True, + do_extract_cont=False, + require_full_line_coverage=True, + do_remove_staging=False, + overwrite=True, + ) + + uv_handler.loop_stage_uvdata( + do_copy=False, + do_contsub=False, + do_extract_line=False, + do_extract_cont=True, + require_full_line_coverage=True, + do_remove_staging=False, + overwrite=True, + ) + + uv_handler.loop_stage_uvdata( + do_copy=False, + do_contsub=False, + do_extract_line=False, + do_extract_cont=False, + require_full_line_coverage=True, + do_remove_staging=True, + overwrite=True, + ) + + ############################################################################## + # Step through imaging + ############################################################################## + + # Image the concatenated, regridded visibility data. The full loop + # involves applying any user-supplied clean mask, multiscale imaging, + # mask generation for the single scale clean, and single scale + # clean. The individual parts can be turned on or off with flags to + # the imaging loop call but this call does everything. 
+ + if do_imaging: + high_snr = 4.0 + low_snr = 2.0 + absolute = True + + convergence_fracflux = 0.01 + singlescale_threshold_value = 1 + + im_handler.loop_imaging( + imaging_method=imaging_method, + do_dirty_image=True, + do_revert_to_dirty=True, + do_read_clean_mask=True, + do_multiscale_clean=True, + do_revert_to_multiscale=True, + do_singlescale_mask=True, + singlescale_mask_absolute=absolute, + singlescale_mask_high_snr=high_snr, + singlescale_mask_low_snr=low_snr, + do_singlescale_clean=True, + do_revert_to_singlescale=True, + convergence_fracflux=convergence_fracflux, + singlescale_threshold_value=singlescale_threshold_value, + do_export_to_fits=True, + export_dirty=False, + export_multiscale=False, + overwrite=True, + ) + + ############################################################################## + # Step through postprocessing + ############################################################################## + + # Postprocess the data in CASA after imaging. This involves primary + # beam correction, linear mosaicking, feathering, conversion to Kelvin + # units, and some downsampling to save space. + + if do_postprocess: + pp_handler.loop_postprocess( + do_prep=True, + do_feather=True, + feather_before_mosaic=True, + do_mosaic=True, + do_cleanup=True, + imaging_method=imaging_method, + ) + +############################################################################## +# Step through derived product creation +############################################################################## + +# Run the calculations requested by the user. The steps are annotated +# here, but in general, do not change anything below this line. Just +# use the flags above to steer the calculation. +# 1) Convolve the post-processed data products to the various angular and +# physical resolutions specified in the keys. +# 2) Estimate the noise from the signal-free regions of the data to +# produce a three-dimensional noise model for each cube. 
+# 3) Construct "strict masks" for each cube at each resolution.
+# 4) Combine the strict masks across all linked resolutions to form
+# "broad masks" that have high completeness.
+# 5) Apply the masks and use the cubes and noise models to produce moment
+# maps with associated uncertainty.
+# 6) Run a second round of moment calculations. This enables calculation
+# of moments that depend on other, earlier moment map calculations
+# 7) Create velocity field. This creates a combined velocity field from
+# a list of derived mom1 maps.
+# 8) Create shuffled cubes. This shuffles the processed and derived cubes
+# by a velocity offset defined by an input velocity field.
+# 9) Construct flat masks. This combines the existing signal masks
+# with a velocity slab based on an input velocity field and velocity
+# window.
+# 10) Produce flat moment-0 maps. This uses the flat masks to create
+# moment-0 maps.
+
+if do_derived:
+    derived_handler.loop_derive_products(
+        do_convolve=do_convolve,
+        do_noise=do_noise,
+        do_strictmask=do_strictmask,
+        do_broadmask=do_broadmask,
+        do_moments=do_moments,
+        do_secondary=do_secondary,
+        do_vfield=do_vfield,
+        do_shuffling=do_shuffling,
+        do_flatmask=do_flatmask,
+        do_flatmaps=do_flatmaps,
+        overwrite=True,
+    )
+
+if do_release:
+    release_handler.loop_build_release()
+
+print("Complete!")
diff --git a/phangsPipeline/casaBlankCleanRecipes.py b/scripts/blank_clean_recipes.py
similarity index 89%
rename from phangsPipeline/casaBlankCleanRecipes.py
rename to scripts/blank_clean_recipes.py
index 793bd2bb..692a9478 100644
--- a/phangsPipeline/casaBlankCleanRecipes.py
+++ b/scripts/blank_clean_recipes.py
@@ -1,4 +1,4 @@
-# This is a very simple SCRIPT (not a module) that will generate blank
+# This is a very simple script that will generate blank
 # clean input files. These can be used as the basis of clean recipes.
# The main use is to generate a new complete set of default-value diff --git a/phangsPipeline/calctimeonsource.py b/scripts/calctimeonsource.py similarity index 91% rename from phangsPipeline/calctimeonsource.py rename to scripts/calctimeonsource.py index 69bb784f..74f4c78a 100644 --- a/phangsPipeline/calctimeonsource.py +++ b/scripts/calctimeonsource.py @@ -1,9 +1,9 @@ -# Analysis utilities +import os + import analysisUtils as au -# Imports -from . import phangsLogger as pl -from . import handlerKeys as kh +from phangsPipeline import handlerKeys as kh +from phangsPipeline import phangsLogger as pl # Set the logging level pl.setup_logger(level='DEBUG', logfile=None) diff --git a/example_make_products.py b/scripts/example_make_products.py similarity index 100% rename from example_make_products.py rename to scripts/example_make_products.py diff --git a/examples_on_clusters/README.rst b/scripts/examples_on_clusters/README.rst similarity index 100% rename from examples_on_clusters/README.rst rename to scripts/examples_on_clusters/README.rst diff --git a/examples_on_clusters/imaging_in_chunks/README.rst b/scripts/examples_on_clusters/imaging_in_chunks/README.rst similarity index 100% rename from examples_on_clusters/imaging_in_chunks/README.rst rename to scripts/examples_on_clusters/imaging_in_chunks/README.rst diff --git a/examples_on_clusters/imaging_in_chunks/job_gather_chunks_to_cube.sh b/scripts/examples_on_clusters/imaging_in_chunks/job_gather_chunks_to_cube.sh similarity index 100% rename from examples_on_clusters/imaging_in_chunks/job_gather_chunks_to_cube.sh rename to scripts/examples_on_clusters/imaging_in_chunks/job_gather_chunks_to_cube.sh diff --git a/examples_on_clusters/imaging_in_chunks/jobarray_imaging_per_chunk.sh b/scripts/examples_on_clusters/imaging_in_chunks/jobarray_imaging_per_chunk.sh similarity index 100% rename from examples_on_clusters/imaging_in_chunks/jobarray_imaging_per_chunk.sh rename to 
scripts/examples_on_clusters/imaging_in_chunks/jobarray_imaging_per_chunk.sh diff --git a/examples_on_clusters/imaging_in_chunks/run_casa_find_nchunks.py b/scripts/examples_on_clusters/imaging_in_chunks/run_casa_find_nchunks.py similarity index 100% rename from examples_on_clusters/imaging_in_chunks/run_casa_find_nchunks.py rename to scripts/examples_on_clusters/imaging_in_chunks/run_casa_find_nchunks.py diff --git a/examples_on_clusters/imaging_in_chunks/run_casa_gather_chunks.py b/scripts/examples_on_clusters/imaging_in_chunks/run_casa_gather_chunks.py similarity index 100% rename from examples_on_clusters/imaging_in_chunks/run_casa_gather_chunks.py rename to scripts/examples_on_clusters/imaging_in_chunks/run_casa_gather_chunks.py diff --git a/examples_on_clusters/imaging_in_chunks/run_casa_imaging_perchunk.py b/scripts/examples_on_clusters/imaging_in_chunks/run_casa_imaging_perchunk.py similarity index 100% rename from examples_on_clusters/imaging_in_chunks/run_casa_imaging_perchunk.py rename to scripts/examples_on_clusters/imaging_in_chunks/run_casa_imaging_perchunk.py diff --git a/examples_on_clusters/imaging_per_target/README.rst b/scripts/examples_on_clusters/imaging_per_target/README.rst similarity index 100% rename from examples_on_clusters/imaging_per_target/README.rst rename to scripts/examples_on_clusters/imaging_per_target/README.rst diff --git a/examples_on_clusters/imaging_per_target/create_imaging_job_config_files.py b/scripts/examples_on_clusters/imaging_per_target/create_imaging_job_config_files.py similarity index 100% rename from examples_on_clusters/imaging_per_target/create_imaging_job_config_files.py rename to scripts/examples_on_clusters/imaging_per_target/create_imaging_job_config_files.py diff --git a/examples_on_clusters/imaging_per_target/jobarray_field_line_staging_imaging_job.sh b/scripts/examples_on_clusters/imaging_per_target/jobarray_field_line_staging_imaging_job.sh similarity index 100% rename from 
examples_on_clusters/imaging_per_target/jobarray_field_line_staging_imaging_job.sh rename to scripts/examples_on_clusters/imaging_per_target/jobarray_field_line_staging_imaging_job.sh diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/cleanmask_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/cleanmask_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/cleanmask_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/cleanmask_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/config_definitions.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/config_definitions.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/config_definitions.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/config_definitions.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/config_lines/line_staging_imaging.all.1.jobconfig.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/config_lines/line_staging_imaging.all.1.jobconfig.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/config_lines/line_staging_imaging.all.1.jobconfig.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/config_lines/line_staging_imaging.all.1.jobconfig.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/continuum_mosaic.clean b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/continuum_mosaic.clean similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/continuum_mosaic.clean rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/continuum_mosaic.clean diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/cube_mosaic.clean b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/cube_mosaic.clean similarity index 100% rename from 
examples_on_clusters/imaging_per_target/keys_hydra/cube_mosaic.clean rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/cube_mosaic.clean diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/derived_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/derived_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/derived_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/derived_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/dir_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/dir_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/dir_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/dir_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/dir_key_mosaic.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/dir_key_mosaic.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/dir_key_mosaic.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/dir_key_mosaic.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/distance_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/distance_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/distance_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/distance_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/imaging_recipes.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/imaging_recipes.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/imaging_recipes.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/imaging_recipes.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/linearmosaic_definitions.txt 
b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/linearmosaic_definitions.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/linearmosaic_definitions.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/linearmosaic_definitions.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/master_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/master_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/master_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/master_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/moment_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/moment_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/moment_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/moment_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/ms_file_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/ms_file_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/ms_file_key.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/ms_file_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/overrides.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/overrides.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/overrides.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/overrides.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/singledish_key.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/singledish_key.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/singledish_key.txt rename to 
scripts/examples_on_clusters/imaging_per_target/keys_hydra/singledish_key.txt diff --git a/examples_on_clusters/imaging_per_target/keys_hydra/target_definitions.txt b/scripts/examples_on_clusters/imaging_per_target/keys_hydra/target_definitions.txt similarity index 100% rename from examples_on_clusters/imaging_per_target/keys_hydra/target_definitions.txt rename to scripts/examples_on_clusters/imaging_per_target/keys_hydra/target_definitions.txt diff --git a/examples_on_clusters/imaging_per_target/run_casa_pipeline_stage_image_permosaic_jobarray.py b/scripts/examples_on_clusters/imaging_per_target/run_casa_pipeline_stage_image_permosaic_jobarray.py similarity index 100% rename from examples_on_clusters/imaging_per_target/run_casa_pipeline_stage_image_permosaic_jobarray.py rename to scripts/examples_on_clusters/imaging_per_target/run_casa_pipeline_stage_image_permosaic_jobarray.py diff --git a/phangsPipeline/print_uv_ranges.py b/scripts/print_uv_ranges.py similarity index 98% rename from phangsPipeline/print_uv_ranges.py rename to scripts/print_uv_ranges.py index 345c0727..fb213255 100644 --- a/phangsPipeline/print_uv_ranges.py +++ b/scripts/print_uv_ranges.py @@ -1,5 +1,7 @@ import glob +import analysisUtils as au + fname_list = [ 'ic5332_7m_co21.ms', 'ic5332_12m_co21.ms', diff --git a/run_casa_imaging_chunked_example.py b/scripts/run_casa_imaging_chunked_example.py similarity index 100% rename from run_casa_imaging_chunked_example.py rename to scripts/run_casa_imaging_chunked_example.py diff --git a/run_casa_imaging_chunked_ngc1097.py b/scripts/run_casa_imaging_chunked_ngc1097.py similarity index 100% rename from run_casa_imaging_chunked_ngc1097.py rename to scripts/run_casa_imaging_chunked_ngc1097.py diff --git a/run_casa_test_imaging_example.py b/scripts/run_casa_test_imaging_example.py similarity index 100% rename from run_casa_test_imaging_example.py rename to scripts/run_casa_test_imaging_example.py diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 
index cf205afd..00000000 --- a/setup.cfg +++ /dev/null @@ -1,88 +0,0 @@ -[metadata] -name = phangsPipeline -author = PHANGS Collaboration -author_email = leroy.42@osu.edu -license = MIT -license_file = LICENSE.rst -url = https://github.com/akleroy/phangs_imaging_scripts -description = A CASA pipeline to process and image calibrated radio interferometry data. -long_description = file: README.rst -long_description_content_type = text/x-rst -edit_on_github = False -github_project = akleroy/phangs_imaging_scripts - -[options] -zip_safe = False -packages = find: -setup_requires = setuptools_scm -install_requires = - scipy - matplotlib - spectral_cube - casatools>=6.4 - casatasks>=6.4 - casaplotms - casaviewer - almatasks - casampi - casashell - casadata - casampi - casaplotserver - - # Keeping here as a not on how to handle casa 6.1-6.3 - # But astropy/spectral-cube will lack recent updates since python=3.6 - # support has been dropped. - # scipy==1.5.2 # lock to py36 compatible version - # matplotlib==3.3 # lock to py36 compatible version - # spectral_cube==0.4.5 # lock to py36 compatible version - - -[options.extras_require] -test = - pytest-astropy -docs = - sphinx-astropy - -[options.package_data] -phangsPipeline = data/* - -[tool:pytest] -testpaths = "phangsPipeline" "docs" -astropy_header = true -doctest_plus = enabled -text_file_format = rst -addopts = --doctest-rst - -[coverage:run] -omit = - phangsPipeline/_astropy_init* - phangsPipeline/conftest.py - phangsPipeline/*setup_package* - phangsPipeline/tests/* - phangsPipeline/*/tests/* - phangsPipeline/extern/* - phangsPipeline/version* - */phangsPipeline/_astropy_init* - */phangsPipeline/conftest.py - */phangsPipeline/*setup_package* - */phangsPipeline/tests/* - */phangsPipeline/*/tests/* - */phangsPipeline/extern/* - */phangsPipeline/version* - -[coverage:report] -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - # Don't complain about packages we have installed - except ImportError 
- # Don't complain if tests don't hit assertions - raise AssertionError - raise NotImplementedError - # Don't complain about script hooks - def main\(.*\): - # Ignore branches that don't pertain to this version of Python - pragma: py{ignore_python_version} - # Don't complain about IPython completion helper - def _ipython_key_completions_ diff --git a/setup.py b/setup.py deleted file mode 100755 index 422476cf..00000000 --- a/setup.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python -# Licensed under a 3-clause BSD style license - see LICENSE.rst - -# NOTE: The configuration for the package, including the name, version, and -# other information are set in the setup.cfg file. - -import os -import sys - -from setuptools import setup - - -# First provide helpful messages if contributors try and run legacy commands -# for tests or docs. - -TEST_HELP = """ -Note: running tests is no longer done using 'python setup.py test'. Instead -you will need to run: - - tox -e test - -If you don't already have tox installed, you can install it with: - - pip install tox - -If you only want to run part of the test suite, you can also use pytest -directly with:: - - pip install -e .[test] - pytest - -For more information, see: - - http://docs.astropy.org/en/latest/development/testguide.html#running-tests -""" - -if 'test' in sys.argv: - print(TEST_HELP) - sys.exit(1) - -DOCS_HELP = """ -Note: building the documentation is no longer done using -'python setup.py build_docs'. 
Instead you will need to run: - - tox -e build_docs - -If you don't already have tox installed, you can install it with: - - pip install tox - -You can also build the documentation with Sphinx directly using:: - - pip install -e .[docs] - cd docs - make html - -For more information, see: - - http://docs.astropy.org/en/latest/install.html#builddocs -""" - -if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv: - print(DOCS_HELP) - sys.exit(1) - -VERSION_TEMPLATE = """ -# Note that we need to fall back to the hard-coded version if either -# setuptools_scm can't be imported or setuptools_scm can't determine the -# version, so we catch the generic 'Exception'. -try: - from setuptools_scm import get_version - version = get_version(root='..', relative_to=__file__) -except Exception: - version = '{version}' -""".lstrip() - -setup(use_scm_version={'write_to': os.path.join('phangsPipeline', 'version.py'), - 'write_to_template': VERSION_TEMPLATE})