diff --git a/.all-contributorsrc b/.all-contributorsrc index 64258c003..c9e690f0e 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -446,7 +446,10 @@ "avatar_url": "https://avatars.githubusercontent.com/u/19339926?v=4", "profile": "https://whyjz.github.io/", "contributions": [ - "tutorial" + "tutorial", + "bug", + "code", + "review" ] }, { diff --git a/.coveragerc b/.coveragerc index 5d3a56beb..2f02d0030 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,7 +1,6 @@ [run] branch = True source = icepyx -omit = +omit = setup.py - test/* doc/* diff --git a/.flake8 b/.flake8 index 16b39f3fe..97a126574 100644 --- a/.flake8 +++ b/.flake8 @@ -1,7 +1,7 @@ [flake8] #GOAL: max_line_length = 79 or 99 max_line_length = 99 -per-file-ignores = +per-file-ignores = # too many leading '#' for block comment */tests/*:E266 # line too long (several test strs) @@ -12,7 +12,7 @@ per-file-ignores = doc/source/conf.py:E402 # GOAL: remove these ignores -ignore = +ignore = # line too long E501 # comparison syntax @@ -38,4 +38,4 @@ ignore = # GOAL: # syntax check doctests in docstrings - # doctests = True \ No newline at end of file + # doctests = True diff --git a/.github/workflows/binder-badge.yml b/.github/workflows/binder-badge.yml index 33979d3f6..22e7195e5 100644 --- a/.github/workflows/binder-badge.yml +++ b/.github/workflows/binder-badge.yml @@ -8,4 +8,4 @@ jobs: steps: - uses: manics/action-binderbadge@main with: - githubToken: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + githubToken: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/get_pypi_stats.yml b/.github/workflows/get_pypi_stats.yml index 34e369aa6..3461ef953 100644 --- a/.github/workflows/get_pypi_stats.yml +++ b/.github/workflows/get_pypi_stats.yml @@ -34,4 +34,4 @@ jobs: author_name: learn2phoenix message: "Pypi stats auto-update" add: "./doc/source/tracking/pypistats/*" -# add: "./pypistats/*" \ No newline at end of file +# add: "./pypistats/*" diff --git a/.github/workflows/linter_actions.yml b/.github/workflows/linter_actions.yml index dc3836536..d2f8c31f6 100644 --- a/.github/workflows/linter_actions.yml +++ b/.github/workflows/linter_actions.yml @@ -11,9 +11,8 @@ jobs: steps: - uses: actions/checkout@v4 - name: Run black linter - uses: psf/black@stable + uses: psf/black@stable # use the flake8 linter to annotate improperly formatted code # note linter arguments are supplied via the .flake8 config file - name: Annotate PR after running flake8 uses: TrueBrain/actions-flake8@v2 - diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml index 0841712c4..91a19785e 100644 --- a/.github/workflows/publish_to_pypi.yml +++ b/.github/workflows/publish_to_pypi.yml @@ -33,7 +33,7 @@ jobs: python-version: 3.9 - name: Install dependencies - run: python -m pip install setuptools wheel + run: python -m pip install build setuptools wheel # This step is only necessary for testing purposes and for TestPyPI - name: Fix up version string for TestPyPI @@ -46,7 +46,7 @@ jobs: - name: Build source and wheel distributions run: | - python setup.py sdist bdist_wheel + python -m build echo "" echo "Generated files:" ls -lh dist/ diff --git a/.github/workflows/traffic_action.yml b/.github/workflows/traffic_action.yml index 5a0028f3e..97705285f 100644 --- a/.github/workflows/traffic_action.yml +++ b/.github/workflows/traffic_action.yml @@ -1,7 +1,7 @@ # temporarily store GitHub traffic data # from: https://github.com/marketplace/actions/repository-traffic on: - schedule: + schedule: # runs once a week on 
sunday - cron: "55 23 * * 0" # Trigger manually at https://github.com/icesat2py/icepyx/actions/workflows/publish_to_pypi.yml @@ -20,19 +20,19 @@ jobs: - uses: actions/checkout@v3 with: ref: "traffic" - + # Calculates traffic and clones and stores in CSV file - - name: GitHub traffic + - name: GitHub traffic uses: sangonzal/repository-traffic-action@v0.1.4 env: - TRAFFIC_ACTION_TOKEN: ${{ secrets.TRAFFIC_ACTION_TOKEN }} - + TRAFFIC_ACTION_TOKEN: ${{ secrets.TRAFFIC_ACTION_TOKEN }} + # Custom steps to move files to another dir and format plot - name: Move files run: | pip install matplotlib pandas python ./doc/source/tracking/traffic/traffic_data_mgmt.py - + # Commits files to repository - name: Commit changes uses: EndBug/add-and-commit@v9 diff --git a/.github/workflows/uml_action.yml b/.github/workflows/uml_action.yml index db0a59fa6..b85885023 100644 --- a/.github/workflows/uml_action.yml +++ b/.github/workflows/uml_action.yml @@ -3,6 +3,7 @@ on: pull_request_review: types: [submitted] branches: development + workflow_dispatch: jobs: diagrams: @@ -18,12 +19,13 @@ jobs: run: | pip install -U pip pip install -r requirements.txt - pip install icepyx pylint + pip install icepyx pylint sudo apt-get install graphviz - name: run pyreverse run: | pyreverse ./icepyx/core -p user_uml -o svg - pyreverse ./icepyx/core -f ALL -p dev_uml -o svg + pyreverse ./icepyx/quest -p quest_user_uml -o svg + pyreverse ./icepyx/core ./icepyx/quest -f ALL -p dev_uml -o svg rm ./packages_dev_uml.svg mv ./*.svg ./doc/source/user_guide/documentation/ - name: Commit changes diff --git a/.mailmap b/.mailmap index 84f19933f..8e5cea899 100644 --- a/.mailmap +++ b/.mailmap @@ -19,4 +19,3 @@ David Shean Anna Valentine <65192768+annavalentine@users.noreply.github.com> Bidhyananda Yadav Friedrich Knuth - diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 57a5b81bf..c7c0d675a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,19 @@ repos: - repo: https://github.com/psf/black - rev: 23.12.0 + rev: 24.4.2 hooks: - id: black -# you can run `pre-commit autoupdate` to automatically update to the latest version of hooks! \ No newline at end of file +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 # Use the ref you want to point at + hooks: + - id: check-added-large-files + args: ["--maxkb=5000"] + - id: end-of-file-fixer + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + +ci: + autoupdate_schedule: monthly + +# you can run `pre-commit autoupdate` to automatically update to the latest version of hooks! 
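The expanded .pre-commit-config.yaml above is consumed by the standard pre-commit CLI. As a minimal sketch of the local workflow a contributor would follow (assuming pre-commit is installed via requirements-dev.txt, as the contributing guide later in this diff suggests):

    # one-time setup: install the CLI and register the hooks with git
    pip install pre-commit
    pre-commit install

    # run every configured hook (black, check-added-large-files,
    # end-of-file-fixer, trailing-whitespace) against the whole repo;
    # the formatting hooks rewrite files in place and report a failure
    # whenever they change something, so re-stage and commit again
    pre-commit run --all-files

    # refresh the hook pins; the ci: block above also schedules this
    # monthly on pre-commit.ci
    pre-commit autoupdate

Relatedly, the publish_to_pypi.yml change above swaps the deprecated `python setup.py sdist bdist_wheel` invocation for the PEP 517 build frontend. The local equivalent is roughly:

    # build an sdist and a wheel into dist/ from the project metadata
    python -m pip install build
    python -m build
    ls -lh dist/

Using `python -m build` keeps the workflow independent of setuptools' deprecated CLI entry point, which is why the workflow also adds `build` to the installed dependencies.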
diff --git a/.travis.yml b/.travis.yml index b2c4ccde6..b3d7081e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,8 +28,16 @@ jobs: include: - stage: basic tests script: pytest icepyx/ --verbose --cov app --ignore icepyx/tests/test_behind_NSIDC_API_login.py - after_success: codecov/codecov-action@v3 - + # includes an integrity check of the uploader as recommended by CodeCov + after_success: + - curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --keyring trustedkeys.gpg --import # One-time step + - curl -Os https://uploader.codecov.io/latest/linux/codecov + - curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + - curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + - gpgv codecov.SHA256SUM.sig codecov.SHA256SUM + - shasum -a 256 -c codecov.SHA256SUM + - chmod +x codecov + - ./codecov -t ${CODECOV_TOKEN} - stage: behind Earthdata script: - export EARTHDATA_PASSWORD=$NSIDC_LOGIN
diff --git a/ATTRIBUTION.rst b/ATTRIBUTION.rst index 372d43897..655a678db 100644 --- a/ATTRIBUTION.rst +++ b/ATTRIBUTION.rst @@ -4,7 +4,7 @@ Recognizing Contributions ========================= We are extremely grateful to everyone who has contributed to the success of the icepyx community and software. -This document outlines our goals to give appropriate attribution to all contributors to icepyx in ways that are fair and diverse and supportive of professional goals. +This document outlines our goals to give appropriate attribution to all contributors to icepyx in ways that are fair and diverse and supportive of professional goals. We define *contributions* broadly as: Efforts towards achieving icepyx's goals, including (1) writing code, tests, or documentation, @@ -21,11 +21,11 @@ We recognize contributions in the following ways. Contributors List ----------------- -This project follows the `all-contributors `_ specification. +This project follows the `all-contributors `_ specification. When you contribute to icepyx for the first time or in a new way, you or a maintainer can use the `All Contributors bot to open a PR `_` to recognize your contribution. Comment on an existing PR with `@all-contributors please add @ for `. -This will add you (or your new contribution type) to the ``CONTRIBUTORS.rst`` file located in the top level directory; +This will add you (or your new contribution type) to the ``CONTRIBUTORS.rst`` file located in the top level directory; the file is packaged and distributed with icepyx, so each release has a record of contributors and their contribution types. @@ -41,8 +41,8 @@ If the user's full name is not available on GitHub, their GitHub handle is used. Example Workflows ----------------- -Many of the example workflows included within icepyx were developed by individuals or small teams for educational or research purposes. -We encourage example developers to provide proper recognition for these efforts both within the notebook itself and +Many of the example workflows included within icepyx were developed by individuals or small teams for educational or research purposes. +We encourage example developers to provide proper recognition for these efforts both within the notebook itself and by adding contributors to the `Contributors List`_ for attribution as described herein. @@ -58,7 +58,7 @@ we collectively represent the icepyx authors in citations (including Zenodo rele As described above, a complete list of contributors and their contribution types is available via the `Contributors List`_.
- ** A note about releases `_ has an entire `Attribution Working Group `_ dedicated to working on attribution for research products. -`URSSI `_ hosted a workshop in 2019 (`report `_) +`URSSI `_ hosted a workshop in 2019 (`report `_) to identify core issues and propose solutions to challenges around software credit. For software, current best practices (`e.g. `_) emphasize the importance of having a document such as this one to describe an individual community's policies for credit, authorship, and attribution. -This document is an effort to describe icepyx's policies, with an awareness that they may change +This document is an effort to describe icepyx's policies, with an awareness that they may change to accommodate community growth, best practices, and feedback. We do not attempt to identify contribution levels through the number of commits made to the repository (e.g. ``git shortlog -sne``) or active engagement on GitHub (e.g. through issues, discussions, and pull requests) and Discourse. -The latter is difficult to quantify, and the use of squash merges into the development branch can mask the relative complexity +The latter is difficult to quantify, and the use of squash merges into the development branch can mask the relative complexity of various contributions and does not necessarily capture significant conceptual contributions. -Copyright notice: Preparation of this document and our credit policies was inspired in part by these `authorship guidelines `_ provided by `Fatiando a Terra `_ +Copyright notice: Preparation of this document and our credit policies was inspired in part by these `authorship guidelines `_ provided by `Fatiando a Terra `_ and `The Turing Way `_. We encourage potential contributors to consider the resources provided by the `NASA High Mountain Asia Team (HiMAT) `_ and established or emerging best practices in their community.
diff --git a/CITATION.cff b/CITATION.cff index f7e0ba025..986f4dc89 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -32,4 +32,4 @@ license: BSD-3-Clause identifiers: - description: "All archived versions of icepyx." type: doi - value: 10.5281/zenodo.7729175 \ No newline at end of file + value: 10.5281/zenodo.7729175
diff --git a/CITATION.rst b/CITATION.rst index 37b88cf97..449a302cf 100644 --- a/CITATION.rst +++ b/CITATION.rst @@ -7,7 +7,7 @@ icepyx ------ This community and software is developed with the goal of supporting science applications. -Thus, our contributors (including those who have developed the packages used within icepyx) +Thus, our contributors (including those who have developed the packages used within icepyx) and maintainers justify their efforts and demonstrate the impact of their work through citations. If you have used icepyx in your work, please consider citing our library. @@ -30,16 +30,16 @@ A bibtex version for users working in Latex:: publisher = {Zenodo}, url = "https://github.com/icesat2py/icepyx", } - + For more information on the "icepyx Developers", please see our `Attribution Guidelines `_. See our docs for a `full list of contributors `_ and their contribution types. icepyx Dependencies ------------------- -If you have used one of the included packages to extend your data analysis capabilities within icepyx, -please consider additionally citing that work, because it represents an independent software contribution to the open-source community. -`SciPy `_ provides a `helpful resource `_ for citing -packages within the SciPy ecosystem (including Matplotlib, NumPy, pandas, and SciPy).
+If you have used one of the included packages to extend your data analysis capabilities within icepyx, +please consider additionally citing that work, because it represents an independent software contribution to the open-source community. +`SciPy `_ provides a `helpful resource `_ for citing +packages within the SciPy ecosystem (including Matplotlib, NumPy, pandas, and SciPy). Links to citation information for other commonly used packages are below. - `fiona `_ @@ -52,4 +52,4 @@ Links to citation information for other commonly used packages are below. ICESat-2 Data ------------- ICESat-2 data citation depends on the exact dataset used. -Citation information for each data product can be found through the `NSIDC website `_. \ No newline at end of file +Citation information for each data product can be found through the `NSIDC website `_. diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst index 302cc5fd4..5d5c4530c 100644 --- a/CONTRIBUTORS.rst +++ b/CONTRIBUTORS.rst @@ -45,7 +45,7 @@ Thanks goes to these wonderful people (`emoji key Tom Johnson
πŸ“– πŸš‡
Tyler Sutterley πŸ“– πŸ’» πŸ€” πŸ’¬ πŸ›‘οΈ ⚠️
Wei Ji πŸ› πŸ’» πŸ“– πŸ’‘ πŸ€” πŸš‡ 🚧 πŸ§‘β€πŸ« πŸ’¬ πŸ‘€ ⚠️ πŸ“’
- Whyjay Zheng βœ…
+ Whyjay Zheng βœ… πŸ› πŸ’» πŸ‘€
Wilson Sauthoff
πŸ‘€ @@ -66,7 +66,7 @@ Thanks goes to these wonderful people (`emoji key - + diff --git a/README.rst b/README.rst index 20709cb61..c0e4b6999 100644 --- a/README.rst +++ b/README.rst @@ -7,18 +7,18 @@ icepyx Latest release (main branch): |Docs Status main| |Travis main Build Status| |Code Coverage main| -Current development version (development branch): |Docs Status dev| |Travis dev Build Status| |Code Coverage dev| +Current development version (development branch): |Docs Status dev| |Travis dev Build Status| |Code Coverage dev| |Pre-commit dev| .. |GitHub license| image:: https://img.shields.io/badge/License-BSD%203--Clause-blue.svg :target: https://opensource.org/licenses/BSD-3-Clause -.. |Conda install| image:: https://anaconda.org/conda-forge/icepyx/badges/version.svg +.. |Conda install| image:: https://anaconda.org/conda-forge/icepyx/badges/version.svg :target: https://anaconda.org/conda-forge/icepyx .. |Pypi install| image:: https://badge.fury.io/py/icepyx.svg :target: https://pypi.org/project/icepyx -.. |Contributors| image:: https://img.shields.io/badge/all_contributors-34-orange.svg?style=flat-square +.. |Contributors| image:: https://img.shields.io/badge/all_contributors-40-orange.svg?style=flat-square :alt: All Contributors :target: https://github.com/icesat2py/icepyx/blob/main/CONTRIBUTORS.rst @@ -43,17 +43,21 @@ Current development version (development branch): |Docs Status dev| |Travis dev .. |Code Coverage dev| image:: https://codecov.io/gh/icesat2py/icepyx/branch/development/graph/badge.svg :target: https://codecov.io/gh/icesat2py/icepyx - + +.. |Pre-commit dev| image:: https://results.pre-commit.ci/badge/github/icesat2py/icepyx/development.svg + :target: https://results.pre-commit.ci/latest/github/icesat2py/icepyx/development + :alt: pre-commit.ci status + Origin and Purpose ------------------ -icepyx is both a software library and a community composed of ICESat-2 data users, developers, and the scientific community. We are working together to develop a shared library of resources - including existing resources, new code, tutorials, and use-cases/examples - that simplify the process of querying, obtaining, analyzing, and manipulating ICESat-2 datasets to enable scientific discovery. +icepyx is both a software library and a community composed of ICESat-2 data users, developers, and the scientific community. We are working together to develop a shared library of resources - including existing resources, new code, tutorials, and use-cases/examples - that simplify the process of querying, obtaining, analyzing, and manipulating ICESat-2 and (via the QUEST module) relevant ancillary datasets to enable scientific discovery. icepyx aims to provide a clearinghouse for code, functionality to improve interoperability, documentation, examples, and educational resources that tackle disciplinary research questions while minimizing the amount of repeated effort across groups utilizing similar datasets. icepyx also hopes to foster collaboration, open-science, and reproducible workflows by integrating and sharing resources. -Many of the underlying tools from which icepyx was developed began as Jupyter Notebooks developed for and during the cryosphere-themed ICESat-2 Hackweek at the University of Washington in June 2019 or as scripts written and used by the ICESat-2 Science Team members. -Originally called icesat2py, the project combined and generalized these scripts into a unified framework, adding examples, documentation, and testing where necessary and making them accessible for everyone. 
-icepyx is now a domain-agnostic, standalone software package and community (under the broader `icesat2py GitHub organization `_) that continues to build functionality for obtaining and working with ICESat-2 data products locally and in the cloud. +Many of the underlying tools from which icepyx was developed began as Jupyter Notebooks developed for and during the cryosphere-themed ICESat-2 Hackweek at the University of Washington in June 2019 or as scripts written and used by the ICESat-2 Science Team members. +Originally called icesat2py, the project combined and generalized these scripts into a unified framework, adding examples, documentation, and testing where necessary and making them accessible for everyone. Similar scripts were conceived at the 2021 Hackweek, which in turn led to the development of QUEST. +icepyx is now a domain-agnostic, standalone software package and community (under the broader `icesat2py GitHub organization `_) that continues to build functionality for obtaining and working with ICESat-2 data products locally and in the cloud. It also improves interoperability for ICESat-2 datasets with other open-source tools. .. _`zipped file`: https://github.com/icesat2py/icepyx/archive/main.zip @@ -65,7 +69,7 @@ Installation The simplest way to install icepyx is by using the `conda `__ package manager. |Conda install| - + conda install icepyx Alternatively, you can also install icepyx using `pip `__. |Pypi install| @@ -91,7 +95,9 @@ Listed below are example Jupyter notebooks for working with ICESat-2 (IS2). `IS2_data_read-in `_ -`IS2_cloud_data_access (BETA ONLY) `_ +`IS2_cloud_data_access `_ + +`QUEST_Finding_Argo_IS2_data `_ Citing icepyx @@ -120,7 +126,7 @@ Please note that this project is released with a Contributor Code of Conduct. By .. |Contributor Covenant| image:: https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg :target: code_of_conduct.md - + Research notice ~~~~~~~~~~~~~~~ diff --git a/code_of_conduct.md b/code_of_conduct.md index 03750a6c6..e4aab0d6e 100644 --- a/code_of_conduct.md +++ b/code_of_conduct.md @@ -82,4 +82,3 @@ Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcem For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. - diff --git a/doc/source/community/contact.rst b/doc/source/community/contact.rst index cceca5d01..f84ac1f6f 100644 --- a/doc/source/community/contact.rst +++ b/doc/source/community/contact.rst @@ -18,7 +18,7 @@ Our team (developers, users, scientists, educators) consists primarily of volunt We meet on an as-needed basis via Zoom to provide support, troubleshoot issues, and plan development and collaboration efforts. Our meetings are open to all, with upcoming meeting information available via `Discourse `_, `GitHub Discussions `_, or by request. -The :ref:`QUEST (Query Unify Explore SpatioTemporal) ` team meets weekly on Mondays to co-work on integrating additional sensors into the icepyx workflow. +The :ref:`QUEST (Query Unify Explore SpatioTemporal) ` team meets weekly on Mondays to co-work on integrating additional sensors into the icepyx workflow. Please contact us if you are interested in joining the QUEST team. 
@@ -27,4 +27,4 @@ Ongoing Efforts In addition to the ongoing development of icepyx itself, the ICESat-2 community continues to grow through a number of related initiatives, workshops, and events: * `CryoCloud `_ -* `ICESat-2 Hackweeks `_ \ No newline at end of file +* `ICESat-2 Hackweeks `_ diff --git a/doc/source/community/resources.rst b/doc/source/community/resources.rst index c0c93ab12..8d60fdee7 100644 --- a/doc/source/community/resources.rst +++ b/doc/source/community/resources.rst @@ -3,19 +3,19 @@ ICESat-2 Resource Guide ======================= -This guide contains information regarding available resources for working with ICESat-2 datasets, -both specifically (e.g. for ICESat-2 data) and more broadly (e.g. point cloud analysis of LiDAR datasets). -It includes resources formally developed by/with support from NASA as well as individual and +This guide contains information regarding available resources for working with ICESat-2 datasets, +both specifically (e.g. for ICESat-2 data) and more broadly (e.g. point cloud analysis of LiDAR datasets). +It includes resources formally developed by/with support from NASA as well as individual and community efforts stemming from personal interest to ongoing research workflows. -Please feel free to add your project or another resource to this guide by submitting a pull request. +Please feel free to add your project or another resource to this guide by submitting a pull request. We reserve the right to reject suggested resources that fall outside the scope of icepyx. Other Ways to Access ICESat-2 Data ---------------------------------- -icepyx aims to provide intuitive, object-based methods for finding, obtaining, visualizing, and analyzing ICESat-2 data as part of an open, -reproducible workflow that leverages existing tools wherever possible (see :ref:`Complementary GitHub Repos`) -and can be run locally, using high performance computing, or in the cloud using Pangeo. +icepyx aims to provide intuitive, object-based methods for finding, obtaining, visualizing, and analyzing ICESat-2 data as part of an open, +reproducible workflow that leverages existing tools wherever possible (see :ref:`Complementary GitHub Repositories `) +and can be run locally, using high performance computing, or in the cloud using Pangeo. A few other options available for querying, visualizing, and downloading ICESat-2 data files are: - `NSIDC (DAAC) Data Access `_ diff --git a/doc/source/community/resources/2019_IS2_HW.rst b/doc/source/community/resources/2019_IS2_HW.rst index d43ea2691..01ac9331a 100644 --- a/doc/source/community/resources/2019_IS2_HW.rst +++ b/doc/source/community/resources/2019_IS2_HW.rst @@ -2,10 +2,10 @@ First ICESat-2 Cryospheric Hackweek at the University of Washington (June 2019) ------------------------------------------------------------------------------- -This June 2019 event resulted in the production of a series of `tutorials `_, -developed primarily by members of the ICESat-2 Science Team and early data users, -aimed at educating the cryospheric community in obtaining and using ICESat-2 datasets. -During the actual Hackweek, teams of researchers and data scientists developed a series of interesting +This June 2019 event resulted in the production of a series of `tutorials `_, +developed primarily by members of the ICESat-2 Science Team and early data users, +aimed at educating the cryospheric community in obtaining and using ICESat-2 datasets. 
+During the actual Hackweek, teams of researchers and data scientists developed a series of interesting `projects `_ related to their interests/research. Many of these resources were drawn from in the initial development of `icepyx`. @@ -14,7 +14,7 @@ Many of these resources were drawn from in the initial development of `icepyx`. Tutorials |Zenodo badge| ^^^^^^^^^^^^^^^^^^^^^^^^ -The available tutorials, most of which contain one or more Jupyter Notebooks to illustrate concepts, are listed below. +The available tutorials, most of which contain one or more Jupyter Notebooks to illustrate concepts, are listed below. Additional information for citing (including licensing) and running (e.g. through a Pangeo Binder) these tutorials can be found at the above link. They are published on `Zenodo `_. @@ -73,4 +73,4 @@ an provide useful starting points to develop effective cryospheric workflows whe - `Topohack `_ - - Resolve topography over complex terrain \ No newline at end of file + - Resolve topography over complex terrain diff --git a/doc/source/community/resources/2020_IS2_HW.rst b/doc/source/community/resources/2020_IS2_HW.rst index 79a68c432..ee447b61a 100644 --- a/doc/source/community/resources/2020_IS2_HW.rst +++ b/doc/source/community/resources/2020_IS2_HW.rst @@ -2,20 +2,20 @@ Second [Virtual] ICESat-2 Cryospheric Hackweek Facilitated by the University of Washington ------------------------------------------------------------------------------------------ -The `2020 ICESat-2 Cryospheric Science Hackweek `_ was the -first virtual Hackweek held by the University of Washington. -While originally planned as a five-day, in-person workshop, the event was shifted to a fully virtual/remote setting in light of -stay-at-home orders and travel restrictions in place to curb the spread of COVID-19. - -To accomodate multiple time zones and limit the daily duration of online tutorial sessions, the event was spread out over the course of ten days. -The first week had three half-days of interactive tutorials/lectures. -The second week had four days that included some interactive tutorials/lectures and scheduled times where instructors were +The `2020 ICESat-2 Cryospheric Science Hackweek `_ was the +first virtual Hackweek held by the University of Washington. +While originally planned as a five-day, in-person workshop, the event was shifted to a fully virtual/remote setting in light of +stay-at-home orders and travel restrictions in place to curb the spread of COVID-19. + +To accomodate multiple time zones and limit the daily duration of online tutorial sessions, the event was spread out over the course of ten days. +The first week had three half-days of interactive tutorials/lectures. +The second week had four days that included some interactive tutorials/lectures and scheduled times where instructors were available to help participants with a facilitated exploration of datasets and hands-on software development. -This June 2020 event resulted in the production of a series of `tutorials `_, +This June 2020 event resulted in the production of a series of `tutorials `_, developed by volunteer instructors and presented during the event. -During the actual Hackweek, teams of researchers and data scientists developed a series of interesting +During the actual Hackweek, teams of researchers and data scientists developed a series of interesting `projects `_ related to their interests/research. .. 
|Zenodo badge| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3966463.svg @@ -30,13 +30,13 @@ The published tutorial repo also includes links to presentation slides and video Tutorial Topics: -1. Introductory Session +1. Introductory Session 1. ICESat-2 Mission: Satellite, Sensor, and Data -1. Git and GitHub +1. Git and GitHub 1. Jupyter and iPython 1. Geospatial Analysis with Python -1. Introduction to ICESat-2 Sea Ice and Land Ice Products and Data Access -1. Programmatic ICESat-2 data access +1. Introduction to ICESat-2 Sea Ice and Land Ice Products and Data Access +1. Programmatic ICESat-2 data access 1. Introduction to HDF5 and ICESat-2 data files 1. Land ice applications 1. Sea ice applications @@ -94,4 +94,4 @@ can provide useful starting points to develop effective cryospheric workflows wh - Compare ICESat-2 elevations with multiple DEM raster data types. - Quantify errors and compare results regionally - - Contributed additional authentication methods to icepyx and provided initial code for what eventually became the Visualization module. \ No newline at end of file + - Contributed additional authentication methods to icepyx and provided initial code for what eventually became the Visualization module. diff --git a/doc/source/community/resources/IS2_software.rst b/doc/source/community/resources/IS2_software.rst index da97ab685..fc0edaf56 100644 --- a/doc/source/community/resources/IS2_software.rst +++ b/doc/source/community/resources/IS2_software.rst @@ -1,6 +1,6 @@ Open-Source Packages -------------------- -ICESat-2 can be tricky to process for the first time, especially if working with the ATL03 data. Software packages have been developed to make ICESat-2 data analysis easier for new and experienced users. +ICESat-2 can be tricky to process for the first time, especially if working with the ATL03 data. Software packages have been developed to make ICESat-2 data analysis easier for new and experienced users. Here, we highlight some commonly-used software packages developed by the science community. Most of these can be used alongside Icepyx to facilitate ICESat-2 data processing. Most of these packages are callable through Python, though others may require access to other software. Keep this in mind before attempting to use any package or plugin. @@ -10,31 +10,33 @@ Most of these packages are callable through Python, though others may require ac - A Python client to process ICESat-2 ATL03 data prior to download. - Aggregates ATL03 data into line segments of user-defined length, creating a customized form of the ATL06 product. - Data may also be subsetted based on spatial bounds and photon classification. - + * `IceFlow `_ - + - by National Snow and Ice Data Center (NSIDC) - A Python library designed to simplify the co-registration of cryospheric datasets. - Matches georeferencing parameters across data sets, allowing a user to derive a time series across multiple datasets for a given region. - Currently valid datasets include ICESat, ICESat-2, and Operation IceBridge. - + * `PhoREAL `_ - by Applied Research Laboratories, University of Texas at Austin - A Python-based toolbox that may also be run as a GUI (Windows only). - Allows for quick processing of ATL03/08 data, which may then be used to generate 2-D plots of ICESat-2 surface heights. - Users may also convert processed data to .las, .csv, and .kml file formats. - + + +.. 
_complementary_GH_repos_label: Complementary GitHub Repositories --------------------------------- -Here we describe a selection of publicly available Python code posted on GitHub with applicability for working with ICESat-2 data. -This includes repositories that are more broadly designed for working with LiDAR/point cloud datasets in general. -These repositories represent independent but complimentary projects that we hope to make easily interoperable within icepyx in order to maximize capabilities and minimize duplication of efforts. +Here we describe a selection of publicly available Python code posted on GitHub with applicability for working with ICESat-2 data. +This includes repositories that are more broadly designed for working with LiDAR/point cloud datasets in general. +These repositories represent independent but complimentary projects that we hope to make easily interoperable within icepyx in order to maximize capabilities and minimize duplication of efforts. Conversations about how to best accomplish this have been ongoing since the conception of icepyx, and we welcome everyone to join the conversation (please see our :ref:`contact page`). -*Note: This list is a compilation of publicly available GitHub repositories and includes some annotations to reflect how they relate to icepyx. -Please check each repository's licensing information before using or modifying their code. +*Note: This list is a compilation of publicly available GitHub repositories and includes some annotations to reflect how they relate to icepyx. +Please check each repository's licensing information before using or modifying their code. Additional resources having to do specifically with obtaining ICESat-2 data are noted in the last section of this document.* * `captoolkit `_ @@ -70,7 +72,7 @@ Additional resources having to do specifically with obtaining ICESat-2 data are MATLAB Packages --------------- * `PhotonLabeler `_ - + - by Lonesome Malambo - A MATLAB-based user interface that allows for manual interpretation of ICESat-2 photons. - Users may classify photons based on surface type, signal/noise likelihood, or other user-defined labels. diff --git a/doc/source/conf.py b/doc/source/conf.py index a77670850..868af3fbc 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -77,7 +77,7 @@ autosectionlabel_prefix_document = True autosummary_generate = True numpydoc_show_class_members = False -jupyter_execute_notebooks = "off" +nb_execution_mode = "off" suppress_warnings = ["myst.header"] # suppress non-consecutive header warning # -- Options for HTML output ------------------------------------------------- @@ -90,7 +90,7 @@ html_theme_options = { "logo_only": True, "display_version": False, - "prev_next_buttons_location": None, + "prev_next_buttons_location": "bottom", "navigation_depth": 4, "collapse_navigation": True, } diff --git a/doc/source/contributing/attribution_link.rst b/doc/source/contributing/attribution_link.rst index ef68beae6..6d294c865 100644 --- a/doc/source/contributing/attribution_link.rst +++ b/doc/source/contributing/attribution_link.rst @@ -1 +1 @@ -.. include:: ../../../ATTRIBUTION.rst \ No newline at end of file +.. include:: ../../../ATTRIBUTION.rst diff --git a/doc/source/contributing/code_of_conduct_link.rst b/doc/source/contributing/code_of_conduct_link.rst index 7f538460b..d350192da 100644 --- a/doc/source/contributing/code_of_conduct_link.rst +++ b/doc/source/contributing/code_of_conduct_link.rst @@ -1,2 +1,2 @@ .. 
include:: ../../../code_of_conduct.md - :parser: myst_parser.sphinx_ \ No newline at end of file + :parser: myst_parser.sphinx_ diff --git a/doc/source/contributing/contribution_guidelines.rst b/doc/source/contributing/contribution_guidelines.rst index 1a359118b..748798560 100644 --- a/doc/source/contributing/contribution_guidelines.rst +++ b/doc/source/contributing/contribution_guidelines.rst @@ -58,7 +58,9 @@ By having a *development* branch for daily work, we enable the *main* branch to First Steps ^^^^^^^^^^^ -Before you begin writing code, please first check out our issues page. Someone may already be working on the same problem, and you may be able to contribute directly to their efforts. If not, create a new issue to describe what you plan to do. +Before you begin writing code, please first check out our issues page. +Someone may already be working on the same problem, and you may be able to contribute directly to their efforts. +If not, create a new issue to describe what you plan to do. General Guidelines ^^^^^^^^^^^^^^^^^^ @@ -73,7 +75,10 @@ General Guidelines Basic Steps to Contribute ^^^^^^^^^^^^^^^^^^^^^^^^^ -We encourage users to follow the `git pull request workflow `_. For more detailed steps, please see :ref:`How to Contribute`. +We encourage users to follow the `git pull request workflow `_. +For more detailed steps, please see :ref:`How to Contribute`. + +For users that would like to add a dataset to the QUEST module, we are currently developing a Jupyter notebook to guide users through the necessary steps. Licensing ^^^^^^^^^ diff --git a/doc/source/contributing/contributors_link.rst b/doc/source/contributing/contributors_link.rst index b7f2de06e..80d654af8 100644 --- a/doc/source/contributing/contributors_link.rst +++ b/doc/source/contributing/contributors_link.rst @@ -1 +1 @@ -.. include:: ../../../CONTRIBUTORS.rst \ No newline at end of file +.. include:: ../../../CONTRIBUTORS.rst diff --git a/doc/source/contributing/development_plan.rst b/doc/source/contributing/development_plan.rst index 9b42b5105..c8327b98e 100644 --- a/doc/source/contributing/development_plan.rst +++ b/doc/source/contributing/development_plan.rst @@ -19,7 +19,7 @@ enabling easy end-to-end data visualization and providing a simple, community-ba Open Science Example Use Cases ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Research is the primary driver for development of icepyx functionality. -We encourage you to use icepyx as a framework for finding and processing your ICESat-2 data, +We encourage you to use icepyx as a framework for finding and processing your ICESat-2 data, from designing your analysis to writing code to analyze your data to generating presentation-quality figures. We welcome example use cases from all disciplines. Some topics currently being investigated using ICESat-2 data: @@ -46,8 +46,8 @@ Workflows showcasing complex analyses to answer pressing science questions provi Validation and Integration with Other Products ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The complexity of multiple data access systems, often with different metadata formats and API access types, -presents a challenge for finding and integrating diverse datasets. +The complexity of multiple data access systems, often with different metadata formats and API access types, +presents a challenge for finding and integrating diverse datasets. 
Driven by researcher use cases, icepyx contains a consistent framework for adding a new product/sensor to an existing data analysis pipeline, improving researcher ability to easily compare diverse datasets across varying sensor types and spatial and temporal scales. @@ -58,4 +58,4 @@ icepyx is continually evolving and its direction is driven by your feedback and If you'd like to add a theme to this development plan, please submit your idea in `GitHub Discussions `_ to solicit community feedback. -Once there is agreement on your idea, submit a pull request to update the Development Plan, including a link to the discussion. \ No newline at end of file +Once there is agreement on your idea, submit a pull request to update the Development Plan, including a link to the discussion. diff --git a/doc/source/contributing/how_to_contribute.rst b/doc/source/contributing/how_to_contribute.rst index b55394d55..84c21717b 100644 --- a/doc/source/contributing/how_to_contribute.rst +++ b/doc/source/contributing/how_to_contribute.rst @@ -1,4 +1,4 @@ -.. _dev_guide_label: +.. _dev_guide_label: How to Contribute ================= @@ -12,20 +12,20 @@ Contributing for the first time ------------------------------- 1. If you don't have one, sign up for a GitHub account (visit https://github.com/ and β€˜sign up for GitHub account’). -2. Clone the icepyx repo: Open a terminal window. -Navigate to the folder on your computer where you want to store icepyx. -For example, +2. Clone the icepyx repo: Open a terminal window. +Navigate to the folder on your computer where you want to store icepyx. +For example, .. code-block:: shell cd /Users/YOURNAMEHERE/documents/ICESat-2 - -Within this folder, clone the icepyx repo by executing + +Within this folder, clone the icepyx repo by executing .. code-block:: shell git clone https://github.com/icesat2py/icepyx.git - + You should receive confirmation in terminal of the files loading into your workspace. For help navigating git and GitHub, see this `guide `__. `GitHub `_ also provides a lot of great how-to materials for navigating and contributing. @@ -36,10 +36,10 @@ Every time you contribute 1. To add new content, you need to create a new branch. You can do this on GitHub by clicking the down arrow next to β€˜development’ and making a new branch -(you can give it whatever name you want - the naming doesn't matter much as it will only be a temporary branch). +(you can give it whatever name you want - the naming doesn't matter much as it will only be a temporary branch). 2. Navigate to the new branch you created. -Make any edits or additions on this branch (this can be done locally or on GitHub directly). +Make any edits or additions on this branch (this can be done locally or on GitHub directly). After you do this, commit your changes and add a descriptive commit message. 3. After committing the changes, push them to GitHub if you were working locally. @@ -60,7 +60,7 @@ The bot will open a separate PR to add the contributor or new contribution types 5. Repeat these steps, creating a new branch and ultimately a pull request for each change. More, smaller pull requests are easier to debug and merge than fewer large ones, so create pull requests regularly! - + Steps for working with icepyx locally ------------------------------------- @@ -70,7 +70,7 @@ update your local copy of icepyx with .. code-block:: shell git pull https://github.com/icesat2py/icepyx.git - + to ensure you have the most up to date version of icepyx in your library. 
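The per-contribution cycle described above (create a branch, edit, commit, push, open a pull request) condenses to a handful of commands when working locally; a sketch, with the branch name, file path, and commit message purely illustrative:

    # start from an up-to-date development branch
    git checkout development
    git pull https://github.com/icesat2py/icepyx.git

    # create a temporary branch for your change
    git checkout -b fix-docs-typo

    # edit files, then stage and commit them with a descriptive message
    git add doc/source/contributing/how_to_contribute.rst
    git commit -m "describe your change here"

    # push the branch and open a pull request against development on GitHub
    git push origin fix-docs-typo

Small branches like this are cheap to create and delete, which is why the guide encourages many small pull requests over a few large ones.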
@@ -87,14 +87,26 @@ Setting up a Development Work Environment ----------------------------------------- icepyx uses a few tools to ensure that files have consistent formatting and run tests. -You can easily install the ones most frequently used by creating a new mamba (or conda) +You can easily install the ones most frequently used by creating a new mamba (or conda) environment (from the home level of your local copy of the icepyx repo) with .. code-block:: shell mamba env create --name icepyx-env --channel conda-forge -f requirements-dev.txt -f requirements.txt -and then pip installing icepyx as described above and below. +and then (1) running ``pre-commit install`` to let git know about pre-commit and +(2) pip installing icepyx as described above and below. + +One of the tools installed with "requirements-dev.txt" is called `pre-commit <https://pre-commit.com/>`_. +We have included a set of pre-commit formatting hooks that we strongly encourage all contributors to use. +These hooks will check the files you are committing for format consistency, +reformatting the files if necessary. +You can tell files were reformatted if you get a message showing one of the checks failed. +In this case, you will need to re-commit your changes until all pre-commit hooks pass +(i.e. a failed pre-commit check results in no git commit). +Pre-commit will also run on icepyx PRs using the pre-commit CI (continuous integration). +As with other automations happening in PRs, +you'll want to make sure you pull the changes back to your local version before making new commits. Considerations with Jupyter Notebook @@ -107,9 +119,9 @@ If you are working in Jupyter Notebook, in addition to manually installing your pip install -e. you will need to dynamically reload icepyx within your notebook by executing - + .. code-block:: python - + %load_ext autoreload import icepyx as ipx %autoreload 2
diff --git a/doc/source/contributing/icepyx_internals.rst b/doc/source/contributing/icepyx_internals.rst index e224be3ce..61c3e8ccb 100644 --- a/doc/source/contributing/icepyx_internals.rst +++ b/doc/source/contributing/icepyx_internals.rst @@ -4,11 +4,12 @@ icepyx Internals Authentication -------------- Authentication in icepyx is handled using a Mixin class. A Mixin class is a class -which defines functionality that may be desired by multiple other classes within -a library. For example, at this time both the Query and Variables classes need +which defines functionality that may be desired by multiple other classes within +a library. +For example, at this time the Query, Variables, and Read classes need to be able to authenticate. Instead of defining the same properties and functionality twice, icepyx has an EarthdataAuthMixin class that is inherited -by both modules. +by any modules that need an Earthdata login. **Property Access** diff --git a/doc/source/contributing/quest-available-datasets.rst b/doc/source/contributing/quest-available-datasets.rst index 86901f7ed..490ce6200 100644 --- a/doc/source/contributing/quest-available-datasets.rst +++ b/doc/source/contributing/quest-available-datasets.rst @@ -13,9 +13,19 @@ List of Datasets ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The Argo mission involves a series of floats that are designed to capture vertical ocean profiles of temperature, salinity, and pressure down to ~2000 m. Some floats are in support of BGC-Argo, which also includes data relevant for biogeochemical applications: oxygen, nitrate, chlorophyll, backscatter, and solar irradiance.
-A paper outlining the Argo extension to QUEST is currently in preparation, with a citable preprint available in the near future. +For interested readers, a preprint outlining the QUEST module and its application to Argo data access is available `here `_. -:ref:`Argo Workflow Example` +`Argo Workflow Example `_ + +QUEST uses the Argovis API to access Argo data, so users are encouraged to use the following citation: + +.. container:: custom + + Tucker, T., D. Giglio, M. Scanderbeg, and S.S.P. Shen, 2020. + Argovis: A Web Application for Fast Delivery, Visualization, and Analysis of Argo data. + J. Atmos. Oceanic Technol., 37, 401-416, https://doi.org/10.1175/JTECH-D-19-0041.1 + +Citations for individual Argo datasets may be found at this link: https://argovis.colorado.edu/about Adding a Dataset to QUEST
diff --git a/doc/source/contributing/release_guide.rst b/doc/source/contributing/release_guide.rst index 2bd901fb3..b5bb4c637 100644 --- a/doc/source/contributing/release_guide.rst +++ b/doc/source/contributing/release_guide.rst @@ -70,7 +70,8 @@ This will involve submitting a new PR to development with whatever debugging cha Once merged into development, any changes will automatically be reflected in this step's PR, and the tests will rerun automatically. With an approving review and passed tests in hand, you're ready to push the new release! -Unlike when you merge new features into ``development`` with a squash merge, for this step you'll want to use a plain old merge. +Unlike when you merge new features into ``development`` with a squash merge, +for this step you'll want to use a plain old merge (the button says "Create a Merge Commit"). This makes it easy to keep ``development`` and ``main`` even instead of diverging due to a series of merge commits. `This website `_ does a great job explaining the how and why of not using a squash merge here. @@ -103,9 +104,9 @@ Tag the Release Last, but potentially most importantly, we need to tag and create the release. This step will trigger the package to be built and update the distribution available from conda and PyPI. It will also publish the new release on Zenodo. -GitHub makes releases easy - on the repo's home page, simply select "Releases" from the right hand side +GitHub makes releases easy - on the repo's home page, simply select "Releases" from the right hand side and then the "Draft a New Release" button. -Add a new tag with the version number of your release, making sure it points to the ``main`` branch +Add a new tag with the version number of your release, making sure it points to the ``main`` branch (by default, GitHub will suggest the ``development`` branch!) Fill out the form and create the release. @@ -114,7 +115,7 @@ You can delete the release from GitHub with the click of a button. If you want to reuse the version tag though (you most likely do), you'll first have to remove the tag locally and push the updated (deleted) tag to GitHub: .. code-block:: shell - + git push --delete origin tagname @@ -134,4 +135,4 @@ To make the latest release available via conda-forge, a few bots will run and le Then they can manually approve the merge to the feedstock repo and the new release will be available in a few minutes. Congratulations! You released a new version of icepyx! -Share the good news on Twitter or Slack and appreciate your hard work and contributions to open-source development. \ No newline at end of file +Share the good news on Twitter or Slack and appreciate your hard work and contributions to open-source development.
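Building on the tag-removal snippet in the release guide above, the full recover-and-reuse sequence is roughly the following; the tag name is hypothetical, and re-tagging assumes the corrected release commit is already on ``main``:

    # delete the bad tag locally, then propagate the deletion to GitHub
    git tag --delete v1.4.1
    git push --delete origin v1.4.1

    # once main is fixed, re-create the tag there and push it
    git checkout main
    git pull
    git tag v1.4.1
    git push origin v1.4.1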
diff --git a/doc/source/example_notebooks/IS2_data_access.ipynb b/doc/source/example_notebooks/IS2_data_access.ipynb index 704abe10c..67f94a7e7 100644 --- a/doc/source/example_notebooks/IS2_data_access.ipynb +++ b/doc/source/example_notebooks/IS2_data_access.ipynb @@ -472,7 +472,7 @@ "### Log in to NASA Earthdata\n", "When downloading data from NSIDC, all users must login using a valid (free) Earthdata account. The process of authenticating is handled by icepyx by creating and handling the required authentication to interface with the data at the DAAC (including ordering and download). Authentication is completed as login-protected features are accessed. In order to allow icepyx to login for us we still have to make sure that we have made our Earthdata credentials available for icepyx to find.\n", "\n", - "There are multiple ways to provide your Earthdata credentials via icepyx. Behind the scenes, icepyx is using the [earthaccess library](https://nsidc.github.io/earthaccess/). The [earthaccess documentation](https://nsidc.github.io/earthaccess/tutorials/restricted-datasets/#auth) automatically tries three primary mechanisms for logging in, all of which are supported by icepyx:\n", + "There are multiple ways to provide your Earthdata credentials via icepyx. Behind the scenes, icepyx is using the [earthaccess library](https://nsidc.github.io/earthaccess/). The [earthaccess documentation](https://earthaccess.readthedocs.io/en/latest/tutorials/getting-started/#auth) automatically tries three primary mechanisms for logging in, all of which are supported by icepyx:\n", "- with `EARTHDATA_USERNAME` and `EARTHDATA_PASSWORD` environment variables (these are the same as the ones you might have set for icepyx previously)\n", "- through an interactive, in-notebook login (used below); passwords are not shown plain text with this option\n", "- with stored credentials in a .netrc file (not recommended for security reasons)" diff --git a/doc/source/example_notebooks/supporting_files/data-access_PineIsland/CITATIONS.txt b/doc/source/example_notebooks/supporting_files/data-access_PineIsland/CITATIONS.txt index 6c0d7eaec..222ce625d 100644 --- a/doc/source/example_notebooks/supporting_files/data-access_PineIsland/CITATIONS.txt +++ b/doc/source/example_notebooks/supporting_files/data-access_PineIsland/CITATIONS.txt @@ -29,4 +29,3 @@ For Analysis_ID 528486, the appropriate citation is HAI, Gang (submitter); HAI, Gang; LI, Rongxing; Tian, Yixiang; Xie, Huan (analyst(s)), 2018. GLIMS Glacier Database. Boulder, CO. National Snow and Ice Data Center. 
http://dx.doi.org/10.7265/N5V98602 - diff --git a/doc/source/example_notebooks/supporting_files/data-access_PineIsland/glims_polygons.prj b/doc/source/example_notebooks/supporting_files/data-access_PineIsland/glims_polygons.prj index a30c00a55..8f73f480f 100644 --- a/doc/source/example_notebooks/supporting_files/data-access_PineIsland/glims_polygons.prj +++ b/doc/source/example_notebooks/supporting_files/data-access_PineIsland/glims_polygons.prj @@ -1 +1 @@ -GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] \ No newline at end of file +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] diff --git a/doc/source/getting_started/citation_link.rst b/doc/source/getting_started/citation_link.rst index c7f2c41ee..f3cff6255 100644 --- a/doc/source/getting_started/citation_link.rst +++ b/doc/source/getting_started/citation_link.rst @@ -1 +1 @@ -.. include:: ../../../CITATION.rst \ No newline at end of file +.. include:: ../../../CITATION.rst diff --git a/doc/source/getting_started/install.rst b/doc/source/getting_started/install.rst index 453e95e7d..0ee77de55 100644 --- a/doc/source/getting_started/install.rst +++ b/doc/source/getting_started/install.rst @@ -1,8 +1,8 @@ .. _`zipped file`: https://github.com/icesat2py/icepyx/archive/main.zip .. _`Fiona`: https://pypi.org/project/Fiona/ -.. |Conda install| image:: https://anaconda.org/conda-forge/icepyx/badges/version.svg +.. |Conda install| image:: https://anaconda.org/conda-forge/icepyx/badges/version.svg :target: https://anaconda.org/conda-forge/icepyx - + .. |Pypi install| image:: https://badge.fury.io/py/icepyx.svg :target: https://pypi.org/project/icepyx/ @@ -43,7 +43,7 @@ To upgrade an installed version of icepyx to the latest stable release, do:: -Using pip |Pypi install| +Using pip |Pypi install| ------------------------ Alternatively, you can also install icepyx using `pip `__. @@ -83,4 +83,4 @@ Alternatively, in a command line or terminal, navigate to the folder in your clo .. code-block:: - pip install -e. \ No newline at end of file + pip install -e. diff --git a/doc/source/getting_started/origin_purpose.rst b/doc/source/getting_started/origin_purpose.rst index f39ba4dc4..e4a97dfe5 100644 --- a/doc/source/getting_started/origin_purpose.rst +++ b/doc/source/getting_started/origin_purpose.rst @@ -1,12 +1,12 @@ Origin and Purpose ================== -icepyx is both a software library and a community composed of ICESat-2 data users, developers, and the scientific community. We are working together to develop a shared library of resources - including existing resources, new code, tutorials, and use-cases/examples - that simplify the process of querying, obtaining, analyzing, and manipulating ICESat-2 and relevant ancillary datasets to enable scientific discovery. +icepyx is both a software library and a community composed of ICESat-2 data users, developers, and the scientific community. We are working together to develop a shared library of resources - including existing resources, new code, tutorials, and use-cases/examples - that simplify the process of querying, obtaining, analyzing, and manipulating ICESat-2 and (via the QUEST module) relevant ancillary datasets to enable scientific discovery. 
icepyx aims to provide a clearinghouse for code, functionality to improve interoperability, documentation, examples, and educational resources that tackle disciplinary research questions while minimizing the amount of repeated effort across groups utilizing similar datasets. icepyx also hopes to foster collaboration, open-science, and reproducible workflows by integrating and sharing resources. -Many of the underlying tools from which icepyx was developed began as Jupyter Notebooks developed for and during the cryosphere-themed ICESat-2 Hackweek at the University of Washington in June 2019 or as scripts written and used by the ICESat-2 Science Team members. -Originally called icesat2py, the project combined and generalized these scripts into a unified framework, adding examples, documentation, and testing where necessary and making them accessible for everyone. -icepyx is now a domain-agnostic, standalone software package and community (under the broader `icesat2py GitHub organization `_) that continues to build functionality for obtaining and working with ICESat-2 data products locally and in the cloud. -It also improves interoperability for ICESat-2 datasets with other open-source tools. +Many of the underlying tools from which icepyx was developed began as Jupyter Notebooks developed for and during the cryosphere-themed ICESat-2 Hackweek at the University of Washington in June 2019 or as scripts written and used by the ICESat-2 Science Team members. +Originally called icesat2py, the project combined and generalized these scripts into a unified framework, adding examples, documentation, and testing where necessary and making them accessible for everyone. +icepyx is now a domain-agnostic, standalone software package and community (under the broader `icesat2py GitHub organization `_) that continues to build functionality for obtaining and working with ICESat-2 data products locally and in the cloud. +It also improves interoperability for ICESat-2 datasets with other open-source tools. Our :ref:`resources guide` provides additional information on both the foundational documents for icepyx and closely related libraries for working with ICESat-2 data. diff --git a/doc/source/index.rst b/doc/source/index.rst index 612af6adc..26f398605 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -6,24 +6,25 @@ :target: https://doi.org/10.21105/joss.04912 -icepyx |version badge| |JOSS| -========================== +icepyx |version badge| |JOSS| +================================== **Python tools for obtaining and working with ICESat-2 data** -Quick Links: +Quick Links: :ref:`Installation` | :ref:`Citation` | `Examples `_ | `Source Code `_ | :ref:`Contact` -icepyx is both a software library and a community composed of ICESat-2 data users, -developers, and the scientific community. -We are working together to develop a shared library of resources - -including existing resources, new code, tutorials, and use-cases/examples - -that simplify the process of querying, obtaining, analyzing, and manipulating +icepyx is both a software library and a community composed of ICESat-2 data users, +developers, and the scientific community. +We are working together to develop a shared library of resources - +including existing resources, new code, tutorials, and use-cases/examples - +that simplify the process of querying, obtaining, analyzing, and manipulating ICESat-2 datasets to enable scientific discovery. 
+To further enhance data discovery, we have developed the QUEST module to facilitate querying of ICESat-2 data and complementary Argo oceanographic data, with additional dataset support expected in the future. .. panels:: @@ -62,11 +63,11 @@ ICESat-2 datasets to enable scientific discovery. --- :img-top: https://cdn-icons-png.flaticon.com/512/4230/4230997.png - + **Development Guide** ^^^^^^^^^^^^^^^^^^^^^ - Have an idea or an ancillary dataset to contribute to icepyx? Go here for information on best practices + Have an idea or an ancillary dataset to contribute to icepyx? Go here for information on best practices for developing and contributing to icepyx. .. link-button:: dev_guide_label @@ -136,6 +137,7 @@ ICESat-2 datasets to enable scientific discovery. :caption: User Guide user_guide/documentation/icepyx + user_guide/documentation/icepyx-quest user_guide/changelog/index .. toctree:: @@ -163,5 +165,5 @@ ICESat-2 datasets to enable scientific discovery. tracking/citations tracking/downloads -Icon images from `Flaticon `_ (by Freepik, Pixel perfect, and Eucalyp) +Icon images from `Flaticon `_ (by Freepik, Pixel perfect, and Eucalyp) and `NASA `_. diff --git a/doc/source/tracking/citations.rst b/doc/source/tracking/citations.rst index bf5672587..b6ed7b004 100644 --- a/doc/source/tracking/citations.rst +++ b/doc/source/tracking/citations.rst @@ -3,7 +3,7 @@ Who is using icepyx? How is icepyx being used by the ICESat-2 data user community? -Is your team or project using icepyx but not listed below? +Is your team or project using icepyx but not listed below? Please add your organization to the appropriate list with a link to your project/product (or :ref:`get in touch` and we'll add it)! @@ -25,16 +25,25 @@ Peer-reviewed publications about icepyx software .. bibliography:: icepyx_pubs.bib :style: mystyle - + joss2023 +icepyx in the open-source landscape + +.. bibliography:: icepyx_pubs.bib + :style: mystyle + + Bednar2023 + Presentations and Materials Featuring icepyx -------------------------------------------- Presentations that feature or explain icepyx .. bibliography:: icepyx_pubs.bib :style: mystyle - + + quest2023agu + selper2023 js2022agu 2022_IS2-HW-tutorials js2021agu @@ -43,15 +52,18 @@ Presentations that feature or explain icepyx js2019agu Publications Utilizing icepyx ------------------------------- +----------------------------- Research that utilizes icepyx for ICESat-2 data .. bibliography:: icepyx_pubs.bib :style: mystyle - + + Tian2024 Freer2023 Idestrom2023 + Mallinis2023 Shean2023 + Snellink2023 Eidam2022 Leeuwen:2022 Musthafa2022 diff --git a/doc/source/tracking/downloads.rst b/doc/source/tracking/downloads.rst index 0eaf828ad..eccc83703 100644 --- a/doc/source/tracking/downloads.rst +++ b/doc/source/tracking/downloads.rst @@ -1,13 +1,13 @@ icepyx Adoption =============== - -Estimating usage of open-source software is a fundamentally difficult task, and "easy" metrics like number of downloads + +Estimating usage of open-source software is a fundamentally difficult task, and "easy" metrics like number of downloads `have the potential to be misleading `_. -We are excited by the enthusiastic adoption of icepyx by the ICESat-2 data user community, +We are excited by the enthusiastic adoption of icepyx by the ICESat-2 data user community, and despite these limitations in data tracking metrics, we have begun (November 2020) to track aggregate user downloads and page views as shown below.
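The pages above describe the query-obtain-analyze workflow in prose; as a minimal sketch of what that looks like in code, assuming the standard `ipx.Query` entry point (the product, bounding box, and date range below are illustrative only):

    import icepyx as ipx

    # Query ATL06 land-ice heights over an illustrative bounding box and date range
    region = ipx.Query("ATL06", [-55, 68, -48, 71], ["2019-02-20", "2019-02-28"])

    # Summarize the granules matching the query (counts and sizes reported from NASA's CMR)
    print(region.avail_granules())

Obtaining the data then only requires an Earthdata login and a call such as `region.download_granules(path)`.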
-Although technologies exist, to respect user privacy and international regulations (without requiring browser cookies), +Although technologies exist, to respect user privacy and international regulations (without requiring browser cookies), we intentionally do not track the IP addresses of users accessing our code or documentation. As a result, we are unable to view usage statistics for specific pages/examples or repeat visitors. If you find certain materials especially helpful, we'd appreciate :ref:`hearing from you`! diff --git a/doc/source/tracking/icepyx_pubs.bib b/doc/source/tracking/icepyx_pubs.bib index d13c9653f..47c024273 100644 --- a/doc/source/tracking/icepyx_pubs.bib +++ b/doc/source/tracking/icepyx_pubs.bib @@ -1,34 +1,74 @@ % ######### materials about icepyx ################ -@article{joss2023, -doi = {10.21105/joss.04912}, -url = {https://doi.org/10.21105/joss.04912}, -year = {2023}, -publisher = {The Open Journal}, -volume = {8}, -number = {84}, -pages = {4912}, -author = {Jessica Scheick and - Wei Ji Leong and - Kelsey Bisson and - Anthony Arendt and - Shashank Bhushan and - Zachary Fair and - Norland Raphael Hagen and - Scott Henderson and - Friedrich Knuth and - Tian Li and - Zheng Liu and - Romina Piunno and - Nitin Ravinder and - Landung "Don" Setiawan and - Tyler Sutterley and - {JP} Swinski and - Anubhav}, -title = {{icepyx}: querying, obtaining, analyzing, and manipulating {ICESat-2} datasets}, -journal = {Journal of Open Source Software} -} + +@misc{quest2023agu, +author = {Scheick, Jessica and + Bisson, Kelsey and + Fair, Zachary and + Piunno, Romina and + Abib, Nicole and + {Di Bella}, Alessandro and + Tilling, Rachel}, +title = {On a {QUEST (Query, Unify, Explore SpatioTemporal)} + to Accelerate {ICESat-2} Applications in Ocean + Science via {icepyx}}, +month = jan, +year = 2024, +publisher = {Zenodo}, +version = {v1.1}, +doi = {10.5281/zenodo.10563003}, +url = {https://doi.org/10.5281/zenodo.10563003} +} +@misc{selper2023, +author = {Tilling, Rachel and + Lopez, Luis and + Steiker, Amy and + Scheick, Jessica + }, +title = {Using {icepyx} to access {ICESat-2 data}}, +note = {{GEDI/ICESat-2} Workshop, + 2023 Space and Sustainability Colloquium + (Espacio y Sostenibilidad). + Sociedad Latinoamericana en Percepci\'{o}n Remota y + Sistemas de Informaci\'{o}n Espacial {(SELPER)}, + Guadalajara, Mexico, 15-16 November 2023. + }, +month = Nov, +year = 2023, +url = {https://nasa-openscapes.github.io/2023-ssc/}, +comment = {https://espacioysostenibilidad.com}, +} + +@article{joss2023, +doi = {10.21105/joss.04912}, +url = {https://doi.org/10.21105/joss.04912}, +year = {2023}, +publisher = {The Open Journal}, +volume = {8}, +number = {84}, +pages = {4912}, +author = {Jessica Scheick and + Wei Ji Leong and + Kelsey Bisson and + Anthony Arendt and + Shashank Bhushan and + Zachary Fair and + Norland Raphael Hagen and + Scott Henderson and + Friedrich Knuth and + Tian Li and + Zheng Liu and + Romina Piunno and + Nitin Ravinder and + Landung "Don" Setiawan and + Tyler Sutterley and + {JP} Swinski and + Anubhav}, +title = {{icepyx}: querying, obtaining, analyzing, and manipulating {ICESat-2} datasets}, +journal = {Journal of Open Source Software} +} + @unpublished{js2022agu, author = {Scheick, J and Bisson, K and Fair, Z and Piunno, R and Leong, WJ and Lopez, L and Hall, S}, note = {Invited abstract and poster. American Geophysical Union Fall Meeting, Chicago, IL, USA. 
12-16 December 2022.}, @@ -140,6 +180,19 @@ @misc{js2019agu % ######### Articles/Proceedings citing icepyx ################ +@INPROCEEDINGS{Bednar2023, +author={Bednar, James A. and Durant, Martin}, +booktitle={Proceedings of the 22nd {Python} in Science Conference (SciPy 2023)}, +title={The {Pandata} Scalable Open-Source Analysis Stack}, +year={2023}, +volume={}, +number={}, +pages={85-92}, +doi={}, +url={https://conference.scipy.org/proceedings/scipy2023/pdfs/james_bednar.pdf} +} + + @article{Bisson:2021, author = {Bisson, K. M. and Cael, B. B.}, title = {How Are Under Ice Phytoplankton Related to Sea Ice in the {Southern Ocean}?}, @@ -154,11 +207,11 @@ @article{Bisson:2021 @INPROCEEDINGS{Eidam2022, -author={Eidam, Emily and Walker, Catherine and - Bisson, Kelsey and Paris, Matthew and +author={Eidam, Emily and Walker, Catherine and + Bisson, Kelsey and Paris, Matthew and Cooper, Lillian}, -booktitle={OCEANS 2022, Hampton Roads}, -title={Novel application of {ICESat-2 ATLAS} data to determine coastal light attenuation as a proxy for suspended particulate matter}, +booktitle={OCEANS 2022, Hampton Roads}, +title={Novel application of {ICESat-2 ATLAS} data to determine coastal light attenuation as a proxy for suspended particulate matter}, year={2022}, volume={}, number={}, @@ -229,6 +282,21 @@ @Article{Li:2020 } +@Article{Mallinis2023, +AUTHOR = {Mallinis, Giorgos and Verde, Natalia and Siachalou, Sofia and Latinopoulos, Dionisis and Akratos, Christos and Kagalou, Ifigenia}, +TITLE = {Evaluation of Multiple Classifier Systems for Mapping Different Hierarchical Levels of Forest Ecosystems in the Mediterranean Region Using {Sentinel-2, Sentinel-1, and ICESat-2} Data}, +JOURNAL = {Forests}, +VOLUME = {14}, +YEAR = {2023}, +NUMBER = {11}, +ARTICLE-NUMBER = {2224}, +URL = {https://www.mdpi.com/1999-4907/14/11/2224}, +ISSN = {1999-4907}, +ABSTRACT = {The conservation and management of forest areas require knowledge about their extent and attributes on multiple scales. The combination of multiple classifiers has been proposed as an attractive classification approach for improved accuracy and robustness that can efficiently exploit the complementary nature of diverse remote sensing data and the merits of individual classifiers. The aim of this study was to develop and evaluate multiple classifier systems (MCSs) within a cloud-based computing environment for multi-scale forest mapping in Northeastern Greece using passive and active remote sensing data. Five individual machine learning base classifiers were used for class discrimination across the three different hierarchy levels, and five ensemble approaches were used for combining them. In the case of the binary classification scheme in the upper level of the hierarchy for separating woody vegetation (forest and shrubs) from other land, the overall accuracy (OA) slightly increased with the use of the MCS approach, reaching 94\%. At the lower hierarchical levels, when using the support vector machine (SVM) base classifier, OA reached 84.13\% and 74.89\% for forest type and species mapping, respectively, slightly outperforming the MCS approach. Yet, two MCS approaches demonstrated robust performance in terms of per-class accuracy, presenting the highest average F1 score across all classification experiments, indicating balanced misclassification errors across all classes. 
Since the competence of individual classifiers is dependent on individual scene settings and data characteristics, we suggest that the adoption of MCS systems in efficient computing environments (i.e., cloud) could alleviate the need for algorithm benchmarking for Earth's surface cover mapping.}, +DOI = {10.3390/f14112224} +} + + @article{Musthafa2022, abstract = {Forests absorb atmospheric carbon and hence play a vital role in carbon sequestration and climate regulation. Recent research emphasizes developing technology and methods to understand the carbon sequestration potential in various forest ecosystems. Forest stand height estimation is one of the crucial parameters in allometry that estimates forest biomass. An attempt is made in this study to map forest stand height in tropical and sub-tropical forests in India using recently launched spaceborne LiDAR platforms Ice Cloud and Elevation Satellite (ICESat-2) and Global Ecosystem Dynamics Investigation (GEDI). A geostatistical kriging approach is used to interpolate the forest stand height, and the generated stand height surface is validated using ground truth samples. The results showed that GEDI data performed better with an RMSE of 3.99 m and 2.62 m in tropical forests than the ICESat-2 data, which showed an RMSE of 5.71 m and 5.08 m, respectively. A similar pattern was observed in sub-tropical forests where GEDI modelled stand height outperformed ICESat-2 modelled stand height. This analysis demonstrates the potential of existing spaceborne LiDAR platforms in interpolating forest stand height at different forest types. Also, the research emphasizes the necessity of a high density of LiDAR footprints spread in both across- and along-track directions for accurate interpolation of forest stand height.}, author = {Musthafa, Mohamed and Singh, Gulab and Kumar, Praveen}, @@ -244,26 +312,17 @@ @article{Musthafa2022 } -% ######### Articles about ICESat-2 ################ - -@article{is2, -title = {The {Ice, Cloud, and land Elevation Satellite-2 (ICESat-2)}: Science requirements, concept, and implementation}, -journal = {Remote Sensing of Environment}, -volume = {190}, -pages = {260-273}, -year = {2017}, -issn = {0034-4257}, -doi = {10.1016/j.rse.2016.12.029}, -url = {https://www.sciencedirect.com/science/article/pii/S0034425716305089}, -author = {Thorsten Markus and Tom Neumann and Anthony Martino and Waleed Abdalati and Kelly Brunt and Beata Csatho and Sinead Farrell and Helen Fricker and Alex Gardner and David Harding and Michael Jasinski and Ron Kwok and Lori Magruder and Dan Lubin and Scott Luthcke and James Morison and Ross Nelson and Amy Neuenschwander and Stephen Palm and Sorin Popescu and CK Shum and Bob E. Schutz and Benjamin Smith and Yuekui Yang and Jay Zwally}, -keywords = {ICESat-2, Land ice, Sea ice, Vegetation, Climate change, Satellite mission}, -abstract = {The Ice, Cloud, and land Elevation Satellite (ICESat) mission used laser altimetry measurements to determine changes in elevations of glaciers and ice sheets, as well as sea ice thickness distribution. These measurements have provided important information on the response of the cryopshere (Earth's frozen surfaces) to changes in atmosphere and ocean condition. ICESat operated from 2003 to 2009 and provided repeat altimetry measurements not only to the cryosphere scientific community but also to the ocean, terrestrial and atmospheric scientific communities. 
The conclusive assessment of significant ongoing rapid changes in the Earth's ice cover, in part supported by ICESat observations, has strengthened the need for sustained, high accuracy, repeat observations similar to what was provided by the ICESat mission. Following recommendations from the National Research Council for an ICESat follow-on mission, the ICESat-2 mission is now under development for planned launch in 2018. The primary scientific aims of the ICESat-2 mission are to continue measurements of sea ice freeboard and ice sheet elevation to determine their changes at scales from outlet glaciers to the entire ice sheet, and from 10s of meters to the entire polar oceans for sea ice freeboard. ICESat carried a single beam profiling laser altimeter that produced ~70m diameter footprints on the surface of the Earth at ~150m along-track intervals. In contrast, ICESat-2 will operate with three pairs of beams, each pair separated by about 3km cross-track with a pair spacing of 90m. Each of the beams will have a nominal 17m diameter footprint with an along-track sampling interval of 0.7m. The differences in the ICESat-2 measurement concept are a result of overcoming some limitations associated with the approach used in the ICESat mission. The beam pair configuration of ICESat-2 allows for the determination of local cross-track slope, a significant factor in measuring elevation change for the outlet glaciers surrounding the Greenland and Antarctica coasts. The multiple beam pairs also provide improved spatial coverage. The dense spatial sampling eliminates along-track measurement gaps, and the small footprint diameter is especially useful for sea surface height measurements in the often narrow leads needed for sea ice freeboard and ice thickness retrievals. The ICESat-2 instrumentation concept uses a low energy 532nm (green) laser in conjunction with single-photon sensitive detectors to measure range. Combining ICESat-2 data with altimetry data collected since the start of the ICESat mission in 2003, such as Operation IceBridge and ESA's CryoSat-2, will yield a 15+ year record of changes in ice sheet elevation and sea ice thickness. ICESat-2 will also provide information of mountain glacier and ice cap elevations changes, land and vegetation heights, inland water elevations, sea surface heights, and cloud layering and optical thickness.} +@misc{Snellink2023, +author = {Kamil Snellink}, +title = {Assessing Land Ice Height Decrease of the {Fleming Glacier} using {ICESat-2} Satellite Data: A 2019-2022 Analysis}, +school = {Delft University of Technology}, +year = {2023}, +month = {July}, +note = {Bachelor Thesis. 
Applied Earth Sciences, Department of Geoscience and Remote Sensing}, +url = {https://repository.tudelft.nl/islandora/object/uuid:909190a5-b91d-4f67-8134-3f19756ed817?collection=education} } -% ######### Research/Articles using (but not citing) icepyx ################ - - @article{Sothe:2022, title={Spatially Continuous Mapping of Forest Canopy Height in {Canada} by Combining {GEDI} and {ICESat-2} with {PALSAR} and {Sentinel}}, volume={14}, @@ -281,24 +340,53 @@ @article{Sothe:2022 } +@ARTICLE{Tian2024, + author={Tian, Xiangxi and Shan, Jie}, + journal={IEEE Transactions on Geoscience and Remote Sensing}, + title={{ICESat-2} Controlled Integration of {GEDI and SRTM} Data for Large-Scale Digital Elevation Model Generation}, + year={2024}, + volume={62}, + number={}, + pages={1-14}, + keywords={Orbits;Laser radar;Earth;Soft sensors;Extraterrestrial measurements;Altimetry;Spaceborne radar;Digital elevation model (DEM);global ecosystem dynamics investigation (GEDI);ice;cloud;and land elevation satellite-2 (ICESat-2);kernel ridge regression (KRR);machine learning;random forest (RF);shuttle radar topography mission (SRTM);support vector regression (SVR)}, + doi={10.1109/TGRS.2024.3389821}} + + +% ######### Articles about ICESat-2 ################ + +@article{is2, +title = {The {Ice, Cloud, and land Elevation Satellite-2 (ICESat-2)}: Science requirements, concept, and implementation}, +journal = {Remote Sensing of Environment}, +volume = {190}, +pages = {260-273}, +year = {2017}, +issn = {0034-4257}, +doi = {10.1016/j.rse.2016.12.029}, +url = {https://www.sciencedirect.com/science/article/pii/S0034425716305089}, +author = {Thorsten Markus and Tom Neumann and Anthony Martino and Waleed Abdalati and Kelly Brunt and Beata Csatho and Sinead Farrell and Helen Fricker and Alex Gardner and David Harding and Michael Jasinski and Ron Kwok and Lori Magruder and Dan Lubin and Scott Luthcke and James Morison and Ross Nelson and Amy Neuenschwander and Stephen Palm and Sorin Popescu and CK Shum and Bob E. Schutz and Benjamin Smith and Yuekui Yang and Jay Zwally}, +keywords = {ICESat-2, Land ice, Sea ice, Vegetation, Climate change, Satellite mission}, +abstract = {The Ice, Cloud, and land Elevation Satellite (ICESat) mission used laser altimetry measurements to determine changes in elevations of glaciers and ice sheets, as well as sea ice thickness distribution. These measurements have provided important information on the response of the cryopshere (Earth's frozen surfaces) to changes in atmosphere and ocean condition. ICESat operated from 2003 to 2009 and provided repeat altimetry measurements not only to the cryosphere scientific community but also to the ocean, terrestrial and atmospheric scientific communities. The conclusive assessment of significant ongoing rapid changes in the Earth's ice cover, in part supported by ICESat observations, has strengthened the need for sustained, high accuracy, repeat observations similar to what was provided by the ICESat mission. Following recommendations from the National Research Council for an ICESat follow-on mission, the ICESat-2 mission is now under development for planned launch in 2018. The primary scientific aims of the ICESat-2 mission are to continue measurements of sea ice freeboard and ice sheet elevation to determine their changes at scales from outlet glaciers to the entire ice sheet, and from 10s of meters to the entire polar oceans for sea ice freeboard. 
ICESat carried a single beam profiling laser altimeter that produced ~70m diameter footprints on the surface of the Earth at ~150m along-track intervals. In contrast, ICESat-2 will operate with three pairs of beams, each pair separated by about 3km cross-track with a pair spacing of 90m. Each of the beams will have a nominal 17m diameter footprint with an along-track sampling interval of 0.7m. The differences in the ICESat-2 measurement concept are a result of overcoming some limitations associated with the approach used in the ICESat mission. The beam pair configuration of ICESat-2 allows for the determination of local cross-track slope, a significant factor in measuring elevation change for the outlet glaciers surrounding the Greenland and Antarctica coasts. The multiple beam pairs also provide improved spatial coverage. The dense spatial sampling eliminates along-track measurement gaps, and the small footprint diameter is especially useful for sea surface height measurements in the often narrow leads needed for sea ice freeboard and ice thickness retrievals. The ICESat-2 instrumentation concept uses a low energy 532nm (green) laser in conjunction with single-photon sensitive detectors to measure range. Combining ICESat-2 data with altimetry data collected since the start of the ICESat mission in 2003, such as Operation IceBridge and ESA's CryoSat-2, will yield a 15+ year record of changes in ice sheet elevation and sea ice thickness. ICESat-2 will also provide information of mountain glacier and ice cap elevations changes, land and vegetation heights, inland water elevations, sea surface heights, and cloud layering and optical thickness.} +} + + % ######### Related Software ################ -@article{Shean2023, -doi = {10.21105/joss.04982}, -url = {https://doi.org/10.21105/joss.04982}, -year = {2023}, -publisher = {The Open Journal}, -volume = {8}, -number = {81}, -pages = {4982}, -author = {David Shean and J.p. Swinski and - Ben Smith and Tyler Sutterley and - Scott Henderson and Carlos Ugarte and - Eric Lidwa and Thomas Neumann}, -title = {{SlideRule}: Enabling rapid, scalable, open science for the {NASA ICESat-2} mission and beyond}, -journal = {Journal of Open Source Software} -} +@article{Shean2023, +doi = {10.21105/joss.04982}, +url = {https://doi.org/10.21105/joss.04982}, +year = {2023}, +publisher = {The Open Journal}, +volume = {8}, +number = {81}, +pages = {4982}, +author = {David Shean and J.p. 
Swinski and + Ben Smith and Tyler Sutterley and + Scott Henderson and Carlos Ugarte and + Eric Lidwa and Thomas Neumann}, +title = {{SlideRule}: Enabling rapid, scalable, open science for the {NASA ICESat-2} mission and beyond}, +journal = {Journal of Open Source Software} +} @misc{SR, @@ -324,4 +412,4 @@ @misc{OA publisher = {Earth Sci Inform}, doi = {10.1007/s12145-020-00520-2}, url = {https://openaltimetry.org/} -} \ No newline at end of file +} diff --git a/doc/source/tracking/pypistats/downloads.svg b/doc/source/tracking/pypistats/downloads.svg index cda5938b4..484dc6d7f 100644 --- a/doc/source/tracking/pypistats/downloads.svg +++ b/doc/source/tracking/pypistats/downloads.svg [regenerated PyPI downloads plot; the SVG path markup did not survive extraction. Recoverable changes: render timestamp 2023-12-01T20:57:28 -> 2024-05-01T20:57:00 and Matplotlib v3.8.2 -> v3.8.4.] diff --git a/doc/source/tracking/pypistats/downloads_data.csv b/doc/source/tracking/pypistats/downloads_data.csv index f7e2a4f6a..b2ea37f93 100644 --- a/doc/source/tracking/pypistats/downloads_data.csv +++ b/doc/source/tracking/pypistats/downloads_data.csv @@ -257,13 +257,9 @@ with_mirrors,2021-03-09,14 with_mirrors,2021-03-10,7 with_mirrors,2021-03-11,41 with_mirrors,2021-03-12,100 -with_mirrors,2021-03-12,41 with_mirrors,2021-03-13,36 -with_mirrors,2021-03-13,29 with_mirrors,2021-03-14,24 -with_mirrors,2021-03-14,21 with_mirrors,2021-03-15,18 -with_mirrors,2021-03-15,7 with_mirrors,2021-03-16,46 with_mirrors,2021-03-17,36 with_mirrors,2021-03-18,8 @@ -512,10 +508,8 @@ with_mirrors,2021-11-19,15 with_mirrors,2021-11-20,41 with_mirrors,2021-11-21,7 with_mirrors,2021-11-22,39 -with_mirrors,2021-11-23,13 with_mirrors,2021-11-23,26 with_mirrors,2021-11-24,34 -with_mirrors,2021-11-24,3 with_mirrors,2021-11-25,20 with_mirrors,2021-11-26,3 with_mirrors,2021-11-27,32 @@ -1240,6 +1234,158 @@ with_mirrors,2023-11-27,88 with_mirrors,2023-11-28,20 with_mirrors,2023-11-29,99 with_mirrors,2023-11-30,59 +with_mirrors,2023-12-01,4 +with_mirrors,2023-12-02,37 +with_mirrors,2023-12-03,6 +with_mirrors,2023-12-04,17 +with_mirrors,2023-12-05,40 +with_mirrors,2023-12-06,25 +with_mirrors,2023-12-07,41 +with_mirrors,2023-12-08,15 +with_mirrors,2023-12-09,8 +with_mirrors,2023-12-10,3 +with_mirrors,2023-12-11,17 +with_mirrors,2023-12-12,13 +with_mirrors,2023-12-13,54 +with_mirrors,2023-12-14,14 +with_mirrors,2023-12-15,54 +with_mirrors,2023-12-16,11 +with_mirrors,2023-12-17,39 +with_mirrors,2023-12-18,17 +with_mirrors,2023-12-19,7 +with_mirrors,2023-12-20,48 +with_mirrors,2023-12-21,9 +with_mirrors,2023-12-22,66 +with_mirrors,2023-12-23,8 +with_mirrors,2023-12-24,71 +with_mirrors,2023-12-25,6 +with_mirrors,2023-12-26,5 +with_mirrors,2023-12-27,11 +with_mirrors,2023-12-28,60 +with_mirrors,2023-12-29,4 +with_mirrors,2023-12-30,30 +with_mirrors,2023-12-31,59 +with_mirrors,2024-01-01,36 +with_mirrors,2024-01-02,18 +with_mirrors,2024-01-03,1 +with_mirrors,2024-01-04,17 +with_mirrors,2024-01-05,140
+with_mirrors,2024-01-06,91 +with_mirrors,2024-01-07,45 +with_mirrors,2024-01-08,25 +with_mirrors,2024-01-09,15 +with_mirrors,2024-01-10,15 +with_mirrors,2024-01-11,20 +with_mirrors,2024-01-12,22 +with_mirrors,2024-01-13,77 +with_mirrors,2024-01-14,14 +with_mirrors,2024-01-15,15 +with_mirrors,2024-01-16,7 +with_mirrors,2024-01-17,16 +with_mirrors,2024-01-18,2 +with_mirrors,2024-01-19,49 +with_mirrors,2024-01-20,64 +with_mirrors,2024-01-21,20 +with_mirrors,2024-01-22,97 +with_mirrors,2024-01-23,19 +with_mirrors,2024-01-24,42 +with_mirrors,2024-01-25,1 +with_mirrors,2024-01-26,8 +with_mirrors,2024-01-27,65 +with_mirrors,2024-01-28,114 +with_mirrors,2024-01-29,16 +with_mirrors,2024-01-30,13 +with_mirrors,2024-01-31,8 +with_mirrors,2024-02-01,85 +with_mirrors,2024-02-02,88 +with_mirrors,2024-02-03,76 +with_mirrors,2024-02-04,80 +with_mirrors,2024-02-05,13 +with_mirrors,2024-02-06,22 +with_mirrors,2024-02-07,10 +with_mirrors,2024-02-08,51 +with_mirrors,2024-02-09,86 +with_mirrors,2024-02-10,2 +with_mirrors,2024-02-11,72 +with_mirrors,2024-02-12,46 +with_mirrors,2024-02-13,2 +with_mirrors,2024-02-14,15 +with_mirrors,2024-02-15,56 +with_mirrors,2024-02-16,24 +with_mirrors,2024-02-17,5 +with_mirrors,2024-02-18,37 +with_mirrors,2024-02-19,118 +with_mirrors,2024-02-20,70 +with_mirrors,2024-02-21,91 +with_mirrors,2024-02-22,12 +with_mirrors,2024-02-23,84 +with_mirrors,2024-02-24,6 +with_mirrors,2024-02-25,10 +with_mirrors,2024-02-26,33 +with_mirrors,2024-02-27,107 +with_mirrors,2024-02-28,37 +with_mirrors,2024-02-29,55 +with_mirrors,2024-03-01,4 +with_mirrors,2024-03-02,56 +with_mirrors,2024-03-03,2 +with_mirrors,2024-03-04,11 +with_mirrors,2024-03-05,25 +with_mirrors,2024-03-06,65 +with_mirrors,2024-03-07,80 +with_mirrors,2024-03-08,40 +with_mirrors,2024-03-09,4 +with_mirrors,2024-03-10,49 +with_mirrors,2024-03-11,50 +with_mirrors,2024-03-12,24 +with_mirrors,2024-03-13,21 +with_mirrors,2024-03-14,45 +with_mirrors,2024-03-15,17 +with_mirrors,2024-03-16,66 +with_mirrors,2024-03-17,38 +with_mirrors,2024-03-18,34 +with_mirrors,2024-03-19,18 +with_mirrors,2024-03-20,23 +with_mirrors,2024-03-21,22 +with_mirrors,2024-03-22,40 +with_mirrors,2024-03-23,53 +with_mirrors,2024-03-24,23 +with_mirrors,2024-03-25,33 +with_mirrors,2024-03-26,39 +with_mirrors,2024-03-27,14 +with_mirrors,2024-03-28,113 +with_mirrors,2024-03-29,85 +with_mirrors,2024-03-30,5 +with_mirrors,2024-03-31,9 +with_mirrors,2024-04-01,12 +with_mirrors,2024-04-02,27 +with_mirrors,2024-04-03,16 +with_mirrors,2024-04-04,23 +with_mirrors,2024-04-05,42 +with_mirrors,2024-04-06,38 +with_mirrors,2024-04-07,41 +with_mirrors,2024-04-08,102 +with_mirrors,2024-04-09,30 +with_mirrors,2024-04-10,17 +with_mirrors,2024-04-11,5 +with_mirrors,2024-04-12,32 +with_mirrors,2024-04-13,2 +with_mirrors,2024-04-14,8 +with_mirrors,2024-04-15,65 +with_mirrors,2024-04-16,14 +with_mirrors,2024-04-17,91 +with_mirrors,2024-04-18,62 +with_mirrors,2024-04-19,56 +with_mirrors,2024-04-20,51 +with_mirrors,2024-04-21,96 +with_mirrors,2024-04-22,23 +with_mirrors,2024-04-23,33 +with_mirrors,2024-04-24,14 +with_mirrors,2024-04-25,64 +with_mirrors,2024-04-26,29 +with_mirrors,2024-04-27,14 +with_mirrors,2024-04-28,33 +with_mirrors,2024-04-29,77 +with_mirrors,2024-04-30,54 without_mirrors,2020-06-18,22 without_mirrors,2020-06-19,14 without_mirrors,2020-06-21,4 @@ -1439,7 +1585,6 @@ without_mirrors,2021-03-09,2 without_mirrors,2021-03-10,3 without_mirrors,2021-03-11,11 without_mirrors,2021-03-12,30 -without_mirrors,2021-03-12,8 without_mirrors,2021-03-13,1 without_mirrors,2021-03-15,2 
without_mirrors,2021-03-16,7 @@ -1676,10 +1821,8 @@ without_mirrors,2021-11-19,9 without_mirrors,2021-11-20,11 without_mirrors,2021-11-21,7 without_mirrors,2021-11-22,14 -without_mirrors,2021-11-23,7 without_mirrors,2021-11-23,11 without_mirrors,2021-11-24,7 -without_mirrors,2021-11-24,3 without_mirrors,2021-11-25,14 without_mirrors,2021-11-26,3 without_mirrors,2021-11-27,5 @@ -2377,3 +2520,152 @@ without_mirrors,2023-11-27,16 without_mirrors,2023-11-28,12 without_mirrors,2023-11-29,29 without_mirrors,2023-11-30,45 +without_mirrors,2023-12-01,4 +without_mirrors,2023-12-02,2 +without_mirrors,2023-12-04,14 +without_mirrors,2023-12-05,5 +without_mirrors,2023-12-06,21 +without_mirrors,2023-12-07,5 +without_mirrors,2023-12-08,11 +without_mirrors,2023-12-09,6 +without_mirrors,2023-12-10,3 +without_mirrors,2023-12-11,13 +without_mirrors,2023-12-12,3 +without_mirrors,2023-12-13,5 +without_mirrors,2023-12-14,10 +without_mirrors,2023-12-15,6 +without_mirrors,2023-12-16,1 +without_mirrors,2023-12-17,2 +without_mirrors,2023-12-18,15 +without_mirrors,2023-12-19,5 +without_mirrors,2023-12-20,9 +without_mirrors,2023-12-21,7 +without_mirrors,2023-12-22,10 +without_mirrors,2023-12-23,8 +without_mirrors,2023-12-24,26 +without_mirrors,2023-12-26,5 +without_mirrors,2023-12-27,11 +without_mirrors,2023-12-28,5 +without_mirrors,2023-12-29,4 +without_mirrors,2023-12-30,6 +without_mirrors,2023-12-31,24 +without_mirrors,2024-01-01,1 +without_mirrors,2024-01-02,18 +without_mirrors,2024-01-03,1 +without_mirrors,2024-01-04,17 +without_mirrors,2024-01-05,92 +without_mirrors,2024-01-06,24 +without_mirrors,2024-01-07,4 +without_mirrors,2024-01-08,17 +without_mirrors,2024-01-09,11 +without_mirrors,2024-01-10,9 +without_mirrors,2024-01-11,4 +without_mirrors,2024-01-12,6 +without_mirrors,2024-01-13,65 +without_mirrors,2024-01-14,8 +without_mirrors,2024-01-15,13 +without_mirrors,2024-01-16,7 +without_mirrors,2024-01-17,14 +without_mirrors,2024-01-18,2 +without_mirrors,2024-01-19,8 +without_mirrors,2024-01-20,12 +without_mirrors,2024-01-21,8 +without_mirrors,2024-01-22,11 +without_mirrors,2024-01-23,14 +without_mirrors,2024-01-24,3 +without_mirrors,2024-01-25,1 +without_mirrors,2024-01-26,2 +without_mirrors,2024-01-27,4 +without_mirrors,2024-01-28,41 +without_mirrors,2024-01-29,12 +without_mirrors,2024-01-30,8 +without_mirrors,2024-01-31,6 +without_mirrors,2024-02-01,47 +without_mirrors,2024-02-02,12 +without_mirrors,2024-02-03,38 +without_mirrors,2024-02-04,5 +without_mirrors,2024-02-05,13 +without_mirrors,2024-02-06,16 +without_mirrors,2024-02-07,6 +without_mirrors,2024-02-08,4 +without_mirrors,2024-02-09,49 +without_mirrors,2024-02-10,2 +without_mirrors,2024-02-11,33 +without_mirrors,2024-02-12,45 +without_mirrors,2024-02-13,2 +without_mirrors,2024-02-14,13 +without_mirrors,2024-02-15,9 +without_mirrors,2024-02-16,24 +without_mirrors,2024-02-18,35 +without_mirrors,2024-02-19,7 +without_mirrors,2024-02-20,27 +without_mirrors,2024-02-21,13 +without_mirrors,2024-02-22,10 +without_mirrors,2024-02-23,37 +without_mirrors,2024-02-24,6 +without_mirrors,2024-02-25,4 +without_mirrors,2024-02-26,15 +without_mirrors,2024-02-27,28 +without_mirrors,2024-02-28,34 +without_mirrors,2024-02-29,18 +without_mirrors,2024-03-01,4 +without_mirrors,2024-03-02,19 +without_mirrors,2024-03-03,2 +without_mirrors,2024-03-04,9 +without_mirrors,2024-03-05,25 +without_mirrors,2024-03-06,51 +without_mirrors,2024-03-07,42 +without_mirrors,2024-03-08,40 +without_mirrors,2024-03-09,4 +without_mirrors,2024-03-10,4 +without_mirrors,2024-03-11,48 
+without_mirrors,2024-03-12,24 +without_mirrors,2024-03-13,21 +without_mirrors,2024-03-14,2 +without_mirrors,2024-03-15,7 +without_mirrors,2024-03-16,29 +without_mirrors,2024-03-17,30 +without_mirrors,2024-03-18,30 +without_mirrors,2024-03-19,14 +without_mirrors,2024-03-20,23 +without_mirrors,2024-03-21,11 +without_mirrors,2024-03-22,24 +without_mirrors,2024-03-23,6 +without_mirrors,2024-03-24,4 +without_mirrors,2024-03-25,3 +without_mirrors,2024-03-26,6 +without_mirrors,2024-03-27,13 +without_mirrors,2024-03-28,13 +without_mirrors,2024-03-29,9 +without_mirrors,2024-03-30,5 +without_mirrors,2024-03-31,3 +without_mirrors,2024-04-01,12 +without_mirrors,2024-04-02,27 +without_mirrors,2024-04-03,6 +without_mirrors,2024-04-04,10 +without_mirrors,2024-04-05,38 +without_mirrors,2024-04-06,1 +without_mirrors,2024-04-07,4 +without_mirrors,2024-04-08,24 +without_mirrors,2024-04-09,28 +without_mirrors,2024-04-10,17 +without_mirrors,2024-04-11,5 +without_mirrors,2024-04-12,22 +without_mirrors,2024-04-13,2 +without_mirrors,2024-04-14,7 +without_mirrors,2024-04-15,28 +without_mirrors,2024-04-16,14 +without_mirrors,2024-04-17,89 +without_mirrors,2024-04-18,25 +without_mirrors,2024-04-19,40 +without_mirrors,2024-04-20,14 +without_mirrors,2024-04-21,8 +without_mirrors,2024-04-22,13 +without_mirrors,2024-04-23,33 +without_mirrors,2024-04-24,14 +without_mirrors,2024-04-25,64 +without_mirrors,2024-04-26,29 +without_mirrors,2024-04-27,14 +without_mirrors,2024-04-28,33 +without_mirrors,2024-04-29,40 +without_mirrors,2024-04-30,50 diff --git a/doc/source/tracking/pypistats/sys_downloads_data.csv b/doc/source/tracking/pypistats/sys_downloads_data.csv index 98cdf03fe..cb8ff00c5 100644 --- a/doc/source/tracking/pypistats/sys_downloads_data.csv +++ b/doc/source/tracking/pypistats/sys_downloads_data.csv @@ -182,6 +182,35 @@ Darwin,2023-11-20,4 Darwin,2023-11-21,2 Darwin,2023-11-24,1 Darwin,2023-11-27,2 +Darwin,2023-12-04,2 +Darwin,2023-12-09,2 +Darwin,2023-12-19,1 +Darwin,2023-12-22,5 +Darwin,2023-12-23,2 +Darwin,2023-12-28,2 +Darwin,2024-01-09,2 +Darwin,2024-01-11,2 +Darwin,2024-01-17,4 +Darwin,2024-01-21,2 +Darwin,2024-01-23,2 +Darwin,2024-02-01,2 +Darwin,2024-02-02,2 +Darwin,2024-02-03,37 +Darwin,2024-02-04,4 +Darwin,2024-02-09,40 +Darwin,2024-02-20,1 +Darwin,2024-02-21,2 +Darwin,2024-03-01,1 +Darwin,2024-03-13,2 +Darwin,2024-03-16,5 +Darwin,2024-03-20,2 +Darwin,2024-03-30,2 +Darwin,2024-04-04,4 +Darwin,2024-04-08,5 +Darwin,2024-04-10,2 +Darwin,2024-04-12,2 +Darwin,2024-04-25,2 +Darwin,2024-04-29,1 Linux,2020-06-18,9 Linux,2020-06-19,2 Linux,2020-06-22,2 @@ -981,6 +1010,124 @@ Linux,2023-11-27,10 Linux,2023-11-28,3 Linux,2023-11-29,23 Linux,2023-11-30,5 +Linux,2023-12-01,2 +Linux,2023-12-02,1 +Linux,2023-12-04,2 +Linux,2023-12-05,3 +Linux,2023-12-06,2 +Linux,2023-12-07,1 +Linux,2023-12-08,5 +Linux,2023-12-09,2 +Linux,2023-12-10,1 +Linux,2023-12-11,7 +Linux,2023-12-12,1 +Linux,2023-12-13,4 +Linux,2023-12-14,3 +Linux,2023-12-18,4 +Linux,2023-12-20,2 +Linux,2023-12-22,3 +Linux,2023-12-27,1 +Linux,2023-12-28,1 +Linux,2023-12-30,5 +Linux,2024-01-01,1 +Linux,2024-01-02,1 +Linux,2024-01-03,1 +Linux,2024-01-04,3 +Linux,2024-01-05,17 +Linux,2024-01-06,2 +Linux,2024-01-08,5 +Linux,2024-01-09,2 +Linux,2024-01-10,2 +Linux,2024-01-12,3 +Linux,2024-01-14,2 +Linux,2024-01-15,1 +Linux,2024-01-16,2 +Linux,2024-01-17,1 +Linux,2024-01-18,1 +Linux,2024-01-19,2 +Linux,2024-01-21,4 +Linux,2024-01-22,6 +Linux,2024-01-23,5 +Linux,2024-01-24,1 +Linux,2024-01-25,1 +Linux,2024-01-26,1 +Linux,2024-01-27,1 +Linux,2024-01-28,1 
+Linux,2024-01-29,5 +Linux,2024-01-30,1 +Linux,2024-01-31,1 +Linux,2024-02-01,36 +Linux,2024-02-02,6 +Linux,2024-02-04,1 +Linux,2024-02-05,9 +Linux,2024-02-06,15 +Linux,2024-02-07,2 +Linux,2024-02-08,2 +Linux,2024-02-09,7 +Linux,2024-02-12,8 +Linux,2024-02-14,10 +Linux,2024-02-15,7 +Linux,2024-02-16,21 +Linux,2024-02-19,5 +Linux,2024-02-20,26 +Linux,2024-02-21,3 +Linux,2024-02-22,4 +Linux,2024-02-23,33 +Linux,2024-02-24,4 +Linux,2024-02-26,13 +Linux,2024-02-27,27 +Linux,2024-02-28,10 +Linux,2024-02-29,15 +Linux,2024-03-02,1 +Linux,2024-03-03,1 +Linux,2024-03-04,9 +Linux,2024-03-05,7 +Linux,2024-03-06,45 +Linux,2024-03-07,32 +Linux,2024-03-08,40 +Linux,2024-03-10,2 +Linux,2024-03-11,46 +Linux,2024-03-12,19 +Linux,2024-03-13,16 +Linux,2024-03-14,2 +Linux,2024-03-15,5 +Linux,2024-03-17,14 +Linux,2024-03-18,27 +Linux,2024-03-19,10 +Linux,2024-03-20,17 +Linux,2024-03-21,11 +Linux,2024-03-22,21 +Linux,2024-03-25,1 +Linux,2024-03-27,7 +Linux,2024-03-28,9 +Linux,2024-03-29,8 +Linux,2024-04-01,3 +Linux,2024-04-02,21 +Linux,2024-04-03,5 +Linux,2024-04-04,6 +Linux,2024-04-05,33 +Linux,2024-04-07,4 +Linux,2024-04-08,19 +Linux,2024-04-09,24 +Linux,2024-04-10,11 +Linux,2024-04-11,4 +Linux,2024-04-12,17 +Linux,2024-04-14,2 +Linux,2024-04-15,20 +Linux,2024-04-16,10 +Linux,2024-04-17,22 +Linux,2024-04-18,20 +Linux,2024-04-19,21 +Linux,2024-04-21,4 +Linux,2024-04-22,13 +Linux,2024-04-23,27 +Linux,2024-04-24,9 +Linux,2024-04-25,36 +Linux,2024-04-26,23 +Linux,2024-04-27,12 +Linux,2024-04-28,18 +Linux,2024-04-29,18 +Linux,2024-04-30,47 Windows,2020-06-21,1 Windows,2020-06-25,1 Windows,2020-06-30,1 @@ -1634,6 +1781,119 @@ Windows,2023-11-27,4 Windows,2023-11-28,7 Windows,2023-11-29,2 Windows,2023-11-30,4 +Windows,2023-12-01,2 +Windows,2023-12-04,9 +Windows,2023-12-05,2 +Windows,2023-12-06,1 +Windows,2023-12-07,1 +Windows,2023-12-08,3 +Windows,2023-12-09,2 +Windows,2023-12-11,2 +Windows,2023-12-12,2 +Windows,2023-12-14,7 +Windows,2023-12-15,6 +Windows,2023-12-17,2 +Windows,2023-12-18,8 +Windows,2023-12-19,4 +Windows,2023-12-20,7 +Windows,2023-12-21,6 +Windows,2023-12-22,1 +Windows,2023-12-23,4 +Windows,2023-12-24,2 +Windows,2023-12-26,4 +Windows,2023-12-27,8 +Windows,2023-12-28,1 +Windows,2023-12-29,2 +Windows,2023-12-31,6 +Windows,2024-01-02,16 +Windows,2024-01-04,2 +Windows,2024-01-05,2 +Windows,2024-01-06,4 +Windows,2024-01-08,5 +Windows,2024-01-09,4 +Windows,2024-01-10,7 +Windows,2024-01-11,2 +Windows,2024-01-13,3 +Windows,2024-01-14,1 +Windows,2024-01-15,9 +Windows,2024-01-16,4 +Windows,2024-01-17,9 +Windows,2024-01-19,2 +Windows,2024-01-20,2 +Windows,2024-01-23,7 +Windows,2024-01-24,1 +Windows,2024-01-26,1 +Windows,2024-01-27,1 +Windows,2024-01-28,2 +Windows,2024-01-29,3 +Windows,2024-01-30,7 +Windows,2024-01-31,4 +Windows,2024-02-01,8 +Windows,2024-02-02,2 +Windows,2024-02-05,3 +Windows,2024-02-06,1 +Windows,2024-02-07,3 +Windows,2024-02-08,1 +Windows,2024-02-09,2 +Windows,2024-02-10,2 +Windows,2024-02-13,2 +Windows,2024-02-14,3 +Windows,2024-02-15,2 +Windows,2024-02-16,3 +Windows,2024-02-18,16 +Windows,2024-02-19,2 +Windows,2024-02-21,6 +Windows,2024-02-22,6 +Windows,2024-02-23,2 +Windows,2024-02-24,2 +Windows,2024-02-25,4 +Windows,2024-02-26,1 +Windows,2024-02-28,4 +Windows,2024-02-29,3 +Windows,2024-03-01,3 +Windows,2024-03-02,4 +Windows,2024-03-05,18 +Windows,2024-03-06,6 +Windows,2024-03-07,10 +Windows,2024-03-10,2 +Windows,2024-03-11,2 +Windows,2024-03-12,4 +Windows,2024-03-13,2 +Windows,2024-03-15,2 +Windows,2024-03-17,2 +Windows,2024-03-18,2 +Windows,2024-03-19,2 +Windows,2024-03-20,3 
+Windows,2024-03-22,3 +Windows,2024-03-23,5 +Windows,2024-03-24,2 +Windows,2024-03-25,2 +Windows,2024-03-26,4 +Windows,2024-03-27,6 +Windows,2024-03-28,4 +Windows,2024-03-30,2 +Windows,2024-03-31,3 +Windows,2024-04-01,9 +Windows,2024-04-02,6 +Windows,2024-04-05,4 +Windows,2024-04-09,4 +Windows,2024-04-10,4 +Windows,2024-04-12,2 +Windows,2024-04-13,2 +Windows,2024-04-14,5 +Windows,2024-04-15,8 +Windows,2024-04-16,4 +Windows,2024-04-17,11 +Windows,2024-04-18,5 +Windows,2024-04-19,3 +Windows,2024-04-23,2 +Windows,2024-04-24,1 +Windows,2024-04-25,23 +Windows,2024-04-26,4 +Windows,2024-04-27,1 +Windows,2024-04-28,15 +Windows,2024-04-29,4 +Windows,2024-04-30,2 null,2020-06-18,12 null,2020-06-19,12 null,2020-06-21,2 @@ -1762,7 +2022,6 @@ null,2021-03-02,1 null,2021-03-03,1 null,2021-03-11,8 null,2021-03-12,28 -null,2021-03-12,6 null,2021-03-17,4 null,2021-03-18,2 null,2021-03-19,8 @@ -2393,3 +2652,90 @@ null,2023-11-26,12 null,2023-11-28,2 null,2023-11-29,4 null,2023-11-30,36 +null,2023-12-02,1 +null,2023-12-04,1 +null,2023-12-06,18 +null,2023-12-07,3 +null,2023-12-08,3 +null,2023-12-10,2 +null,2023-12-11,4 +null,2023-12-13,1 +null,2023-12-16,1 +null,2023-12-18,3 +null,2023-12-21,1 +null,2023-12-22,1 +null,2023-12-23,2 +null,2023-12-24,24 +null,2023-12-26,1 +null,2023-12-27,2 +null,2023-12-28,1 +null,2023-12-29,2 +null,2023-12-30,1 +null,2023-12-31,18 +null,2024-01-02,1 +null,2024-01-04,12 +null,2024-01-05,73 +null,2024-01-06,18 +null,2024-01-07,4 +null,2024-01-08,7 +null,2024-01-09,3 +null,2024-01-12,3 +null,2024-01-13,62 +null,2024-01-14,5 +null,2024-01-15,3 +null,2024-01-16,1 +null,2024-01-18,1 +null,2024-01-19,4 +null,2024-01-20,10 +null,2024-01-21,2 +null,2024-01-22,5 +null,2024-01-24,1 +null,2024-01-27,2 +null,2024-01-28,38 +null,2024-01-29,4 +null,2024-01-31,1 +null,2024-02-01,1 +null,2024-02-02,2 +null,2024-02-03,1 +null,2024-02-05,1 +null,2024-02-07,1 +null,2024-02-08,1 +null,2024-02-11,33 +null,2024-02-12,37 +null,2024-02-18,19 +null,2024-02-21,2 +null,2024-02-23,2 +null,2024-02-26,1 +null,2024-02-27,1 +null,2024-02-28,20 +null,2024-03-02,14 +null,2024-03-03,1 +null,2024-03-09,4 +null,2024-03-12,1 +null,2024-03-13,1 +null,2024-03-16,24 +null,2024-03-17,14 +null,2024-03-18,1 +null,2024-03-19,2 +null,2024-03-20,1 +null,2024-03-23,1 +null,2024-03-24,2 +null,2024-03-26,2 +null,2024-03-29,1 +null,2024-03-30,1 +null,2024-04-03,1 +null,2024-04-05,1 +null,2024-04-06,1 +null,2024-04-11,1 +null,2024-04-12,1 +null,2024-04-17,56 +null,2024-04-19,16 +null,2024-04-20,14 +null,2024-04-21,4 +null,2024-04-23,4 +null,2024-04-24,4 +null,2024-04-25,3 +null,2024-04-26,2 +null,2024-04-27,1 +null,2024-04-29,17 +null,2024-04-30,1 diff --git a/doc/source/tracking/traffic/clones.csv b/doc/source/tracking/traffic/clones.csv index 5aab8477b..67575c77f 100644 --- a/doc/source/tracking/traffic/clones.csv +++ b/doc/source/tracking/traffic/clones.csv @@ -876,3 +876,129 @@ _date,total_clones,unique_clones 2023-12-07,7,7 2023-12-09,5,4 2023-12-10,2,2 +2023-12-11,14,6 +2023-12-12,8,8 +2023-12-13,19,13 +2023-12-14,60,35 +2023-12-15,40,24 +2023-12-16,2,2 +2023-12-17,1,1 +2023-12-18,56,29 +2023-12-20,10,8 +2023-12-21,14,11 +2023-12-22,24,21 +2023-12-23,2,2 +2023-12-24,2,1 +2023-12-25,5,5 +2023-12-28,1,1 +2023-12-29,6,4 +2023-12-30,3,2 +2024-01-01,3,2 +2024-01-02,6,5 +2024-01-03,7,6 +2024-01-04,69,35 +2024-01-05,79,37 +2024-01-06,2,2 +2024-01-08,3,3 +2024-01-09,1,1 +2024-01-10,8,7 +2024-01-11,1,1 +2024-01-12,7,6 +2024-01-13,4,4 +2024-01-14,2,1 +2024-01-15,4,4 +2024-01-16,1,1 +2024-01-18,4,3 +2024-01-19,1,1 +2024-01-20,3,3 
+2024-01-22,3,2 +2024-01-23,1,1 +2024-01-24,22,13 +2024-01-25,2,2 +2024-01-26,27,17 +2024-01-27,4,3 +2024-01-28,1,1 +2024-01-29,5,5 +2024-01-30,11,10 +2024-01-31,46,26 +2024-02-01,10,8 +2024-02-02,11,10 +2024-02-03,1,1 +2024-02-05,35,25 +2024-02-06,15,10 +2024-02-07,5,5 +2024-02-08,62,36 +2024-02-09,11,7 +2024-02-10,4,4 +2024-02-12,17,15 +2024-02-13,4,4 +2024-02-14,12,10 +2024-02-15,11,8 +2024-02-16,24,16 +2024-02-17,5,5 +2024-02-19,5,5 +2024-02-20,9,8 +2024-02-21,12,7 +2024-02-22,41,24 +2024-02-23,7,4 +2024-02-24,1,1 +2024-02-26,11,8 +2024-02-27,24,16 +2024-02-28,51,31 +2024-02-29,33,22 +2024-03-01,7,5 +2024-03-02,2,2 +2024-03-03,10,3 +2024-03-04,13,11 +2024-03-05,15,11 +2024-03-06,16,11 +2024-03-07,12,4 +2024-03-08,36,17 +2024-03-09,5,5 +2024-03-10,5,2 +2024-03-11,17,12 +2024-03-12,28,23 +2024-03-13,35,19 +2024-03-14,2,1 +2024-03-15,30,21 +2024-03-16,9,3 +2024-03-17,1,1 +2024-03-18,12,11 +2024-03-19,3,3 +2024-03-20,5,5 +2024-03-21,4,4 +2024-03-22,3,3 +2024-03-23,2,2 +2024-03-25,22,14 +2024-03-26,2,2 +2024-03-27,23,16 +2024-03-28,6,5 +2024-03-29,6,5 +2024-03-30,2,2 +2024-03-31,1,1 +2024-04-11,2,2 +2024-04-12,1,1 +2024-04-13,2,2 +2024-04-15,5,2 +2024-04-16,2,1 +2024-04-17,1,1 +2024-04-18,4,3 +2024-04-19,1,1 +2024-04-22,3,3 +2024-04-23,3,2 +2024-04-24,1,1 +2024-04-25,5,4 +2024-04-26,3,1 +2024-04-27,2,2 +2024-04-29,3,3 +2024-04-30,2,1 +2024-05-01,2,2 +2024-05-02,3,2 +2024-05-03,2,1 +2024-05-04,2,1 +2024-05-05,3,3 +2024-05-06,9,8 +2024-05-08,3,3 +2024-05-10,34,16 +2024-05-11,2,2 +2024-05-12,2,2 diff --git a/doc/source/tracking/traffic/plots.svg b/doc/source/tracking/traffic/plots.svg index f1fb9aab4..b682d6b72 100644 --- a/doc/source/tracking/traffic/plots.svg +++ b/doc/source/tracking/traffic/plots.svg [regenerated GitHub traffic plots; the SVG path markup did not survive extraction. Recoverable changes: render timestamp 2023-12-11T00:40:01 -> 2024-05-13T00:40:56 and Matplotlib v3.8.2 -> v3.8.4.] diff --git a/doc/source/tracking/traffic/views.csv b/doc/source/tracking/traffic/views.csv index 724aa73f6..f645da225 100644 --- a/doc/source/tracking/traffic/views.csv +++ b/doc/source/tracking/traffic/views.csv @@ -1134,3 +1134,146 @@ _date,total_views,unique_views 2023-12-08,23,10 2023-12-09,9,5 2023-12-10,19,7 +2023-12-11,153,12 +2023-12-12,65,13 +2023-12-13,87,11
+2023-12-14,129,12 +2023-12-15,172,6 +2023-12-16,15,2 +2023-12-17,9,6 +2023-12-18,104,9 +2023-12-19,45,9 +2023-12-20,59,11 +2023-12-21,60,17 +2023-12-22,91,10 +2023-12-23,15,2 +2023-12-24,2,2 +2023-12-25,16,5 +2023-12-26,20,3 +2023-12-27,12,7 +2023-12-28,1,1 +2023-12-29,10,5 +2023-12-30,5,2 +2023-12-31,5,3 +2024-01-01,9,3 +2024-01-02,91,7 +2024-01-03,60,13 +2024-01-04,219,13 +2024-01-05,262,12 +2024-01-06,14,6 +2024-01-07,29,8 +2024-01-08,64,14 +2024-01-09,43,12 +2024-01-10,158,10 +2024-01-11,19,5 +2024-01-12,23,7 +2024-01-13,32,4 +2024-01-14,3,3 +2024-01-15,13,7 +2024-01-16,29,10 +2024-01-17,48,11 +2024-01-18,28,9 +2024-01-19,25,10 +2024-01-20,34,13 +2024-01-21,5,2 +2024-01-22,13,4 +2024-01-23,69,14 +2024-01-24,111,8 +2024-01-25,22,10 +2024-01-26,82,5 +2024-01-27,8,5 +2024-01-28,4,3 +2024-01-29,33,8 +2024-01-30,42,13 +2024-01-31,90,13 +2024-02-01,59,16 +2024-02-02,76,10 +2024-02-03,37,8 +2024-02-04,4,3 +2024-02-05,87,13 +2024-02-06,47,13 +2024-02-07,57,10 +2024-02-08,98,11 +2024-02-09,27,6 +2024-02-10,54,7 +2024-02-11,42,7 +2024-02-12,20,6 +2024-02-13,62,11 +2024-02-14,69,9 +2024-02-15,27,7 +2024-02-16,105,10 +2024-02-17,27,3 +2024-02-18,22,5 +2024-02-19,23,6 +2024-02-20,56,10 +2024-02-21,42,11 +2024-02-22,52,7 +2024-02-23,42,11 +2024-02-24,24,5 +2024-02-25,21,6 +2024-02-26,42,10 +2024-02-27,41,7 +2024-02-28,140,12 +2024-02-29,64,7 +2024-03-01,13,8 +2024-03-02,6,3 +2024-03-03,48,3 +2024-03-04,9,5 +2024-03-05,112,17 +2024-03-06,148,16 +2024-03-07,14,11 +2024-03-08,61,15 +2024-03-09,5,4 +2024-03-10,14,4 +2024-03-11,39,16 +2024-03-12,129,15 +2024-03-13,155,19 +2024-03-14,30,13 +2024-03-15,81,9 +2024-03-16,5,4 +2024-03-17,34,5 +2024-03-18,55,15 +2024-03-19,57,11 +2024-03-20,55,15 +2024-03-21,71,10 +2024-03-22,39,9 +2024-03-23,4,3 +2024-03-25,70,16 +2024-03-26,25,8 +2024-03-27,69,12 +2024-03-28,23,14 +2024-03-29,23,8 +2024-03-30,18,5 +2024-03-31,5,2 +2024-04-11,1,1 +2024-04-12,12,5 +2024-04-13,12,3 +2024-04-14,14,6 +2024-04-15,62,12 +2024-04-16,35,13 +2024-04-17,47,9 +2024-04-18,65,6 +2024-04-19,26,8 +2024-04-20,6,5 +2024-04-21,6,4 +2024-04-22,24,12 +2024-04-23,70,13 +2024-04-24,20,5 +2024-04-25,55,9 +2024-04-26,31,7 +2024-04-27,5,2 +2024-04-28,14,5 +2024-04-29,32,12 +2024-04-30,45,14 +2024-05-01,9,5 +2024-05-02,40,7 +2024-05-03,51,4 +2024-05-04,4,3 +2024-05-05,8,3 +2024-05-06,39,15 +2024-05-07,60,13 +2024-05-08,32,10 +2024-05-09,22,7 +2024-05-10,83,8 +2024-05-11,4,3 +2024-05-12,13,4 diff --git a/doc/source/user_guide/changelog/index.rst b/doc/source/user_guide/changelog/index.rst index 1d6579898..ee5bb11b3 100644 --- a/doc/source/user_guide/changelog/index.rst +++ b/doc/source/user_guide/changelog/index.rst @@ -7,9 +7,17 @@ This is the list of changes made to icepyx in between each release. Full details can be found in the `commit logs `_. -Latest Release (Version 1.0.0) +Latest Release (Version 1.1.0) ------------------------------ +.. toctree:: + :maxdepth: 2 + + v1.1.0 + +Version 1.0.0 +------------- + .. 
toctree:: :maxdepth: 2 @@ -22,7 +30,7 @@ Version 0.8.1 :maxdepth: 2 v0.8.1 - + Version 0.8.0 ------------- @@ -30,7 +38,7 @@ Version 0.8.0 :maxdepth: 2 v0.8.0 - + Version 0.7.0 ------------- @@ -46,7 +54,7 @@ Version 0.6.4 :maxdepth: 2 v0.6.4 - + Version 0.6.3 ------------- @@ -54,7 +62,7 @@ Version 0.6.3 :maxdepth: 2 v0.6.3 - + Version 0.6.2 ------------- @@ -62,7 +70,7 @@ Version 0.6.2 :maxdepth: 2 v0.6.2 - + Version 0.6.0 + 0.6.1 --------------------- @@ -70,7 +78,7 @@ Version 0.6.0 + 0.6.1 :maxdepth: 2 v0.6.0 - + Version 0.5.0 ------------- @@ -78,7 +86,7 @@ Version 0.5.0 :maxdepth: 2 v0.5.0 - + Version 0.4.1 ------------- .. toctree:: diff --git a/doc/source/user_guide/changelog/v0.1-alpha.rst b/doc/source/user_guide/changelog/v0.1-alpha.rst index 1bb5a0558..881b7c48c 100644 --- a/doc/source/user_guide/changelog/v0.1-alpha.rst +++ b/doc/source/user_guide/changelog/v0.1-alpha.rst @@ -51,4 +51,4 @@ Other Contributors ~~~~~~~~~~~~ -.. contributors:: 12ba33d..v0.1-alpha \ No newline at end of file +.. contributors:: 12ba33d..v0.1-alpha diff --git a/doc/source/user_guide/changelog/v0.2-alpha.rst b/doc/source/user_guide/changelog/v0.2-alpha.rst index 37a666996..382df2960 100644 --- a/doc/source/user_guide/changelog/v0.2-alpha.rst +++ b/doc/source/user_guide/changelog/v0.2-alpha.rst @@ -18,7 +18,7 @@ New Features - Create `Granules` class to get/order/download granules and call as an attribute of the icesat2data object - Create `Variables` class to interface with ICESat-2 nested variables - Create `Parameters` class for managing API inputs within `APIformatting` module - + - allow installation with pip and git Bug fixes @@ -52,4 +52,4 @@ Documentation Contributors ~~~~~~~~~~~~ -.. contributors:: v0.1-alpha..v0.2-alpha \ No newline at end of file +.. contributors:: v0.1-alpha..v0.2-alpha diff --git a/doc/source/user_guide/changelog/v0.3.1.rst b/doc/source/user_guide/changelog/v0.3.1.rst index 994216084..9e892a609 100644 --- a/doc/source/user_guide/changelog/v0.3.1.rst +++ b/doc/source/user_guide/changelog/v0.3.1.rst @@ -62,4 +62,4 @@ Documentation Contributors ~~~~~~~~~~~~ -.. contributors:: v0.2-alpha..v0.3.1|HEAD \ No newline at end of file +.. contributors:: v0.2-alpha..v0.3.1|HEAD diff --git a/doc/source/user_guide/changelog/v0.3.2.rst b/doc/source/user_guide/changelog/v0.3.2.rst index 99b3c7fe1..c06516ffe 100644 --- a/doc/source/user_guide/changelog/v0.3.2.rst +++ b/doc/source/user_guide/changelog/v0.3.2.rst @@ -41,4 +41,4 @@ Documentation Contributors ~~~~~~~~~~~~ -.. contributors:: v0.3.1..v0.3.2|HEAD \ No newline at end of file +.. contributors:: v0.3.1..v0.3.2|HEAD diff --git a/doc/source/user_guide/changelog/v0.6.2.rst b/doc/source/user_guide/changelog/v0.6.2.rst index 30b500998..cd67eada2 100644 --- a/doc/source/user_guide/changelog/v0.6.2.rst +++ b/doc/source/user_guide/changelog/v0.6.2.rst @@ -19,7 +19,7 @@ Maintenance - update action add-and-commit v8 and remove branch/ref keywords (#290) - add read-in functionality for deeply nested variables (e.g. 
ATL08) (#281) -- icepyx tracking (traffic and pypi) updates (#295) +- icepyx tracking (traffic and pypi) updates (#295) Contributors diff --git a/doc/source/user_guide/changelog/v0.6.3.rst b/doc/source/user_guide/changelog/v0.6.3.rst index f8ef57e97..793a49ac0 100644 --- a/doc/source/user_guide/changelog/v0.6.3.rst +++ b/doc/source/user_guide/changelog/v0.6.3.rst @@ -11,7 +11,7 @@ New Features ~~~~~~~~~~~~ - create merge index during data read-in (#305) - + - implement photon_id index (commented) and linear photon_idx - add all contributors bot @@ -21,7 +21,7 @@ Bug fixes ~~~~~~~~~ - address readable_granule_name implementation error (#292) - + - place one order per granule_id rather than submitting a list Maintenance @@ -34,19 +34,19 @@ Documentation ^^^^^^^^^^^^^ - Tracking docs updates (#307) - + - change citation heading to 'citing icepyx' for clarity - separate downloads and citations in tracking section - add no-tracking note - simplify and update development plan (#306) - + - simplify and update development plan - note we've not activated discussions - fix IceFlow link in citations file - add how-to guide and clarify contributions in icepyx docs (#319) - + - added text to include ancillary data within icepyx - added Jupyter notebook considerations - added GitHub instructions for new users @@ -55,11 +55,11 @@ Documentation - created how to develop file This file focuses on the basic introductory GitHub steps as well as best practices with code and working with icepyx locally - + - added contribution links, and QUEST idea - + Added in-text links for the contribution page, as well as a placeholder for 'adding an ancillary dataset' to the contribution page. Will need to add a link for a tutorial on 'how to add a dataset' in an upcoming release. - + - add other hackweek repos - allcontrib bot and simplify instructions diff --git a/doc/source/user_guide/changelog/v0.6.4.rst b/doc/source/user_guide/changelog/v0.6.4.rst index 39d7c2cc9..82c7448c9 100644 --- a/doc/source/user_guide/changelog/v0.6.4.rst +++ b/doc/source/user_guide/changelog/v0.6.4.rst @@ -23,7 +23,7 @@ Bug fixes Deprecations ~~~~~~~~~~~~ -- in `ipx.Query.avail_granules` and `ipx.core.granules.gran_IDs`, +- in `ipx.Query.avail_granules` and `ipx.core.granules.gran_IDs`, the `s3urls` keyword has been changed to `cloud`. @@ -50,4 +50,4 @@ Documentation Contributors ~~~~~~~~~~~~ -.. contributors:: v0.6.3..v0.6.4|HEAD \ No newline at end of file +..
contributors:: v0.6.3..v0.6.4|HEAD diff --git a/doc/source/user_guide/changelog/v0.7.0.rst b/doc/source/user_guide/changelog/v0.7.0.rst index abcb14cbe..583b1838f 100644 --- a/doc/source/user_guide/changelog/v0.7.0.rst +++ b/doc/source/user_guide/changelog/v0.7.0.rst @@ -15,7 +15,7 @@ New Features - add earthaccess (formerly earthdata) as dependency - remove Earthdata module - - update earthdata_login function to use earthaccess + - update earthdata_login function to use earthaccess - update data access and other example notebooks - update earthdata and NSIDC login tests - simplify cloud access example auth handling using earthaccess @@ -38,7 +38,7 @@ Bug fixes - reactivate atl10 and atl12 viz tests and update sizes - disable ATL13 viz tests -- manual solution for getting ATL15 s3 urls via icepyx (#413) +- manual solution for getting ATL15 s3 urls via icepyx (#413) - fix NSIDC login tests (#418) diff --git a/doc/source/user_guide/changelog/v0.8.0.rst b/doc/source/user_guide/changelog/v0.8.0.rst index 4f60f57f4..4665ee6a7 100644 --- a/doc/source/user_guide/changelog/v0.8.0.rst +++ b/doc/source/user_guide/changelog/v0.8.0.rst @@ -1,5 +1,5 @@ What's new in 0.8.0 (12 September 2023) ------------------------------------ +--------------------------------------- These are the changes in icepyx 0.8.0 See :ref:`release` for a full changelog including other versions of icepyx. @@ -9,7 +9,7 @@ New Features ~~~~~~~~~~~~ - create temporal module and add input types and testing (#327) - + - create temporal module - create temporal testing module - add support for more temporal input types (datetime objects) and formats (dict) @@ -19,7 +19,7 @@ New Features - GitHub action UML generation auto-update - Refactor authentication (#435) - + - modularize authentication using a mixin class - add docstrings and update example notebooks - add tests @@ -31,7 +31,7 @@ Deprecations ~~~~~~~~~~~~ - Remove intake catalog from Read module (#438) - + - delete is2cat.py and references - remove intake and related modules @@ -46,7 +46,7 @@ Maintenance - is2ref tests for product formatting and default var lists (#424) - get s3urls for all data products and update doctests to v006 (#426) - + - Always send CMR query to provider NSIDC_CPRD to make sure s3 urls are returned. - Traffic updates 2023 Feb-Aug (#442) @@ -55,14 +55,14 @@ Documentation ^^^^^^^^^^^^^ - update install instructions (#409) - + - add s3fs as requirement to make cloud access default - transition to recommending mamba over conda - add release guide to docs (#255) - docs maintenance and pubs/citations update (#422) - + - add JOSS to bib and badges - switch zenodo links to nonversioned icepyx @@ -71,11 +71,11 @@ Other ^^^^^ - JOSS submission (#361) - + Matches Release v0.6.4_JOSS per #420 plus a few editorial edits available via the pubs/joss branch. - update and clarify authorship, citation, and attribution policies (#419) - + - add CITATION.cff file - update citation docs with Zenodo doi and 'icepyx Developers' as author diff --git a/doc/source/user_guide/changelog/v0.8.1.rst b/doc/source/user_guide/changelog/v0.8.1.rst index 5b86c5dec..fe95d8428 100644 --- a/doc/source/user_guide/changelog/v0.8.1.rst +++ b/doc/source/user_guide/changelog/v0.8.1.rst @@ -1,5 +1,5 @@ What's new in 0.8.1 (14 November 2023) -------------------------------------- +-------------------------------------- These are the changes in icepyx 0.8.1 See :ref:`release` for a full changelog including other versions of icepyx. 
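To make two of the API changes noted in the changelogs above concrete (the v0.6.4 rename of the `s3urls` keyword to `cloud`, and the v0.8.0 support for datetime-object date ranges), a minimal hedged sketch with illustrative values:

    import datetime as dt
    import icepyx as ipx

    # Since v0.8.0, date_range accepts datetime objects, not only 'YYYY-MM-DD' strings
    start = dt.datetime(2019, 2, 20, 0, 10)
    end = dt.datetime(2019, 2, 28, 14, 45)
    region = ipx.Query("ATL06", [-55, 68, -48, 71], [start, end])

    # Since v0.6.4, the former `s3urls` keyword is `cloud`; with ids=True this
    # reports granule IDs alongside their s3 URLs for cloud access
    print(region.avail_granules(ids=True, cloud=True))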
diff --git a/doc/source/user_guide/changelog/v1.0.0.rst b/doc/source/user_guide/changelog/v1.0.0.rst index 1e015e2b6..fe0c34a71 100644 --- a/doc/source/user_guide/changelog/v1.0.0.rst +++ b/doc/source/user_guide/changelog/v1.0.0.rst @@ -1,5 +1,5 @@ What's new in 1.0.0 (5 January 2024) ------------------------------------ +------------------------------------ These are the changes in icepyx 1.0.0 See :ref:`release` for a full changelog including other versions of icepyx. @@ -9,28 +9,28 @@ New (and Updated) Features ~~~~~~~~~~~~~~~~~~~~~~~~~~ - update Read input arguments (#444) - + - add filelist and product properties to Read object - deprecate filename_pattern and product class Read inputs - transition to data_source input as a string (including glob string) or list - update tutorial with changes and user guidance for using glob - enable QUEST kwarg handling (#452) - + - add kwarg acceptance for data queries and download_all in quest - Add QUEST dataset page to RTD - Variables as an independent class (#451) - + - Refactor Variables class to be user facing functionality - Expand Variables class to read s3 urls (#464) - + - expand extract_product and extract_version to check for s3 url - add cloud notes to variables notebook - add argo functionality to QUEST (#427) - + - add argo.py dataset functionality and implementation through QUEST - demonstrate QUEST usage via example notebook - add save to QUEST DataSet class template @@ -49,7 +49,7 @@ Bug fixes - fix spot number calculation (#458) - Update read module coordinate dimension manipulations to use new xarray index (#473) - Fix behind EDL tests (#480) -- fix permissions for publishing to pypi (#487) +- fix permissions for publishing to pypi (#487) Deprecations @@ -76,7 +76,7 @@ Maintenance - update docs config files to be compliant - temporarily ignore many flake8 error codes until legacy files are updated -- Convert deprecation warnings to errors and remove associated checks #482 +- Convert deprecation warnings to errors and remove associated checks #482 Documentation diff --git a/doc/source/user_guide/changelog/v1.1.0.rst b/doc/source/user_guide/changelog/v1.1.0.rst new file mode 100644 index 000000000..a4c17dbb7 --- /dev/null +++ b/doc/source/user_guide/changelog/v1.1.0.rst @@ -0,0 +1,92 @@ +What's new in 1.1.0 (5 June 2024) +--------------------------------- + +These are the changes in icepyx 1.1.0 See :ref:`release` for a full changelog +including other versions of icepyx. + + +New Features +~~~~~~~~~~~~ + +- fix s3url grabbing for .nc files (#507) + + - reformat some long-lines as well + +- clean up the read module after adding cloud-read capabilities (#502) + + - update tests to match new parse_source function + +- pre-commit check for files over 5MB (#143) +- Clean up APIformatting module and fix ATL11 temporal kwarg submission (#515) + + - add atl11 exception for cmr and required temporal parameters + - remove unused "default" key in _get_possible_keys + - move "short_name" and "version" keys from the CMR list to the required list + - utilize EarthdataAuthMixin for Granules (was still explicitly passing session) + - Touched files were also cleaned up for linting. + + +Bug fixes +~~~~~~~~~ + +- update doctest for newest xarray repr (fix failing test) (#513) +- add dask[dataframe] to requirements (#519) +- improve netcdf multi-group reading and handle metadata QC issue (#516) + + - ATL11 v006, ATL14 v003, and ATL15 v003 have a meaningless string set as the version. 
A temporary fix gets the most recent version number from CMR to use instead for those products. + - adds handling to read in multiple variable groups from a netcdf + + +Deprecations +~~~~~~~~~~~~ + +- None + + +Maintenance +^^^^^^^^^^^ + +- update codecov uploader to start getting coverage reports (#496) +- fix black linter failures resulting from psf/black action update (#501) +- reformat all code using the pre-commit ci (#503) +- [pre-commit.ci] pre-commit autoupdate (#506, #520, #524) +- pkg_resources deprecation: stop calling setup.py directly (#500) +- add dependency needed to build packages (#511) +- [pre-commit.ci] pre-commit autoupdate of black (#510) +- minor edits to language and badges (#494) + + +Documentation +^^^^^^^^^^^^^ + +- update 2023 icepyx citations (#499) +- Updating the "QUEST Available Datasets" page (#509) + + - Updated links and information relating to Argo and the QUEST example notebook. + +- add quest to pyreverse uml generation + separate in API docs (#498) + + - generate umls for quest module + - make separate API docs pages for icepyx and quest + - add manual trigger for uml creation to allow updates after post-approval changes + - simplify highest level API TOC to show only class levels + - add previous/next navigation buttons to API docs + +- docs: add whyjz as a contributor for bug, code, and review (#522) +- Added a brief mention to QUEST in icepyx docs intro. (#514) +- [docs] update icepyx citations through Apr 2024 (#525) +- [docs] traffic updates Jan-May 2024 (#526) + + - removes a few outstanding duplicates in 2021 data + + +Other +^^^^^ + +- None + + +Contributors +~~~~~~~~~~~~ + +.. contributors:: v1.0.0..v1.1.0|HEAD
diff --git a/doc/source/user_guide/documentation/classes_dev_uml.svg b/doc/source/user_guide/documentation/classes_dev_uml.svg index 09c112f5c..d2ca62b29 100644
[auto-generated developer UML class diagram (SVG); the raw SVG hunks did not survive extraction. Recoverable changes: the icepyx.quest classes Argo, DataSet, and Quest are added alongside the icepyx.core classes, with Argo inheriting from DataSet and Quest from GenQuery; Granules now inherits from EarthdataAuthMixin; the session parameter is removed from Granules.download and Granules.place_order; Read._filelist loses its list type annotation.]
diff --git a/doc/source/user_guide/documentation/classes_quest_user_uml.svg b/doc/source/user_guide/documentation/classes_quest_user_uml.svg new file mode 100644 index 000000000..b5ce50192
[new auto-generated user-facing UML class diagram (SVG) for QUEST; raw SVG markup not preserved. It shows the public attributes and methods of Argo (download, save, search_data), DataSet (download, save, search_data, visualize), and Quest (datasets; add_argo, add_icesat2, download_all, save_all, search_all), with Argo inheriting from DataSet.]
diff --git a/doc/source/user_guide/documentation/classes_user_uml.svg b/doc/source/user_guide/documentation/classes_user_uml.svg index 256cc1794..6ecef9681 100644
[auto-generated user-facing UML class diagram (SVG); raw SVG hunks not preserved. Recoverable changes: Granules now inherits from EarthdataAuthMixin, and the session parameter is removed from Granules.download and Granules.place_order.]
diff --git a/doc/source/user_guide/documentation/components.rst b/doc/source/user_guide/documentation/components.rst index dea41a970..7d81c190c 100644 --- a/doc/source/user_guide/documentation/components.rst +++ b/doc/source/user_guide/documentation/components.rst @@ -10,7 +10,7 @@ APIformatting :members: :undoc-members: :show-inheritance: - + EarthdataAuthMixin ------------------ @@ -27,14 +27,6 @@ granules :undoc-members: :show-inheritance: -is2cat ------- - -.. automodule:: icepyx.core.is2cat - :members: - :undoc-members: - :show-inheritance: - is2ref ------ @@ -73,4 +65,4 @@ visualize .. automodule:: icepyx.core.visualization :members: :undoc-members: - :show-inheritance: \ No newline at end of file + :show-inheritance: diff --git a/doc/source/user_guide/documentation/icepyx-quest.rst b/doc/source/user_guide/documentation/icepyx-quest.rst new file mode 100644 index 000000000..9fc95a40f --- /dev/null +++ b/doc/source/user_guide/documentation/icepyx-quest.rst @@ -0,0 +1,22 @@ +.. _api_doc_ref_q: + +icepyx-QUEST Documentation (API) +================================ + +QUEST and icepyx share the top-level GenQuery class. +The documentation for GenQuery is within :class:`icepyx.Query`. + +.. image:: classes_quest_user_uml.svg + :width: 600 + :alt: UML Class Diagram illustrating the public-facing classes within quest, their attributes and methods, their relationships (e.g. component classes).
Class diagram illustrating the QUEST components of icepyx's +public-facing classes, their attributes and methods, and their relationships. +Additional UML diagrams, including a more detailed, developer UML class diagram showing hidden parameters, +are available on `GitHub in the icepyx/doc/source/user_guide/documentation/ directory <https://github.com/icesat2py/icepyx/tree/development/doc/source/user_guide/documentation>`_. +Diagrams are updated automatically after a pull request (PR) is approved and before it is merged to the development branch. + +.. toctree:: + :maxdepth: 1 + + quest diff --git a/doc/source/user_guide/documentation/icepyx.rst b/doc/source/user_guide/documentation/icepyx.rst index a8a9a6f8e..eec823e10 100644 --- a/doc/source/user_guide/documentation/icepyx.rst +++ b/doc/source/user_guide/documentation/icepyx.rst @@ -1,27 +1,21 @@ .. _api_doc_ref: -icepyx Documentation (API Reference) -==================================== - -.. image:: packages_user_uml.svg - :width: 600 - :alt: UML package Diagram illustrating the public-facing, high-level packages within icepyx and their relationships. - -icepyx package diagram illustrating the library's public-facing, high-level package structure and their relationships. - +icepyx Documentation (API) +========================== .. image:: classes_user_uml.svg :width: 600 :alt: UML Class Diagram illustrating the public-facing classes within icepyx, their attributes and methods, their relationships (e.g. component classes). icepyx class diagram illustrating the library's public-facing classes, their attributes and methods, and their relationships. -A more detailed, developer UML class diagram showing hidden parameters is available on GitHub in the ``icepyx/doc/source/user_guide/documentation/`` directory. +Additional UML diagrams, including a more detailed, developer UML class diagram showing hidden parameters, +are available on `GitHub in the icepyx/doc/source/user_guide/documentation/ directory <https://github.com/icesat2py/icepyx/tree/development/doc/source/user_guide/documentation>`_. Diagrams are updated automatically after a pull request (PR) is approved and before it is merged to the development branch. ..
toctree:: + :maxdepth: 1 query read - quest variables components
diff --git a/doc/source/user_guide/documentation/packages_quest_user_uml.svg b/doc/source/user_guide/documentation/packages_quest_user_uml.svg new file mode 100644 index 000000000..7ef3ca7d9
[new auto-generated UML package diagram (SVG) for QUEST; raw SVG markup not preserved. It shows the packages icepyx.quest, icepyx.quest.dataset_scripts, icepyx.quest.dataset_scripts.dataset, icepyx.quest.dataset_scripts.argo, and icepyx.quest.quest, with dataset_scripts and argo depending on dataset, and quest depending on argo.]
diff --git a/doc/source/user_guide/documentation/packages_user_uml.svg b/doc/source/user_guide/documentation/packages_user_uml.svg index 5c45fc92b..029a13015 100644
[auto-generated UML package diagram (SVG); raw SVG hunks not preserved. Recoverable change: icepyx.core.granules now depends on icepyx.core.auth.]
diff --git a/doc/source/user_guide/documentation/query.rst b/doc/source/user_guide/documentation/query.rst index 168986548..df82aa35b 100644 --- a/doc/source/user_guide/documentation/query.rst +++ b/doc/source/user_guide/documentation/query.rst @@ -23,7 +23,6 @@ Attributes Query.cycles Query.dates Query.end_time - Query.file_vars Query.granules Query.order_vars Query.product @@ -51,4 +50,4 @@ Methods Query.product_summary_info Query.show_custom_options Query.visualize_spatial_extent - Query.visualize_elevation \ No newline at end of file + Query.visualize_elevation diff --git a/doc/source/user_guide/documentation/quest.rst b/doc/source/user_guide/documentation/quest.rst index 79ebbd621..c5ae240b2 100644 ---
a/doc/source/user_guide/documentation/quest.rst +++ b/doc/source/user_guide/documentation/quest.rst @@ -15,16 +15,14 @@ Constructors Quest -Attributes ----------- - -.. autosummary:: - :toctree: ../../_icepyx/ - - Methods ------- .. autosummary:: :toctree: ../../_icepyx/ + Quest.add_icesat2 + Quest.add_argo + Quest.search_all + Quest.download_all + Quest.save_all diff --git a/doc/sphinxext/contributors.py b/doc/sphinxext/contributors.py index 976ae0a5a..21154166d 100644 --- a/doc/sphinxext/contributors.py +++ b/doc/sphinxext/contributors.py @@ -13,6 +13,7 @@ While the v0.23.1 tag does not exist, that will use the HEAD of the branch as the end of the revision range. """ + from announce import build_components from docutils import nodes from docutils.parsers.rst import Directive diff --git a/examples/README.md b/examples/README.md index f44e8846c..08f7ac50d 100644 --- a/examples/README.md +++ b/examples/README.md @@ -2,4 +2,4 @@ Examples are available in the [documentation](https://icepyx.readthedocs.io). Source Jupyter notebooks and supporting materials are in -[`doc/source/example_notebooks`](https://github.com/icesat2py/icepyx/tree/main/doc/source/example_notebooks). \ No newline at end of file +[`doc/source/example_notebooks`](https://github.com/icesat2py/icepyx/tree/main/doc/source/example_notebooks). diff --git a/icepyx/core/APIformatting.py b/icepyx/core/APIformatting.py index b5d31bdfa..eb77c37e9 100644 --- a/icepyx/core/APIformatting.py +++ b/icepyx/core/APIformatting.py @@ -1,7 +1,6 @@ # Generate and format information for submitting to API (CMR and NSIDC) import datetime as dt -import pprint # ---------------------------------------------------------------------- @@ -134,13 +133,13 @@ def combine_params(*param_dicts): Examples -------- - >>> CMRparams = {'short_name': 'ATL06', 'version': '002', 'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', 'bounding_box': '-55,68,-48,71'} - >>> reqparams = {'page_size': 2000, 'page_num': 1} + >>> CMRparams = {'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', 'bounding_box': '-55,68,-48,71'} + >>> reqparams = {'short_name': 'ATL06', 'version': '002', 'page_size': 2000, 'page_num': 1} >>> ipx.core.APIformatting.combine_params(CMRparams, reqparams) - {'short_name': 'ATL06', - 'version': '002', - 'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', + {'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', 'bounding_box': '-55,68,-48,71', + 'short_name': 'ATL06', + 'version': '002', 'page_size': 2000, 'page_num': 1} """ @@ -164,17 +163,18 @@ def to_string(params): Examples -------- - >>> CMRparams = {'short_name': 'ATL06', 'version': '002', 'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', 'bounding_box': '-55,68,-48,71'} - >>> reqparams = {'page_size': 2000, 'page_num': 1} + >>> CMRparams = {'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', + ... 
'bounding_box': '-55,68,-48,71'} + >>> reqparams = {'short_name': 'ATL06', 'version': '002', 'page_size': 2000, 'page_num': 1} >>> params = ipx.core.APIformatting.combine_params(CMRparams, reqparams) >>> ipx.core.APIformatting.to_string(params) - 'short_name=ATL06&version=002&temporal=2019-02-20T00:00:00Z,2019-02-28T23:59:59Z&bounding_box=-55,68,-48,71&page_size=2000&page_num=1' + 'temporal=2019-02-20T00:00:00Z,2019-02-28T23:59:59Z&bounding_box=-55,68,-48,71&short_name=ATL06&version=002&page_size=2000&page_num=1' """ param_list = [] for k, v in params.items(): if isinstance(v, list): - for l in v: - param_list.append(k + "=" + l) + for i in v: + param_list.append(k + "=" + i) else: param_list.append(k + "=" + str(v)) # return the parameter string @@ -255,7 +255,6 @@ def _get_possible_keys(self): if self.partype == "CMR": self._poss_keys = { - "default": ["short_name", "version"], "spatial": ["bounding_box", "polygon"], "optional": [ "temporal", @@ -266,8 +265,10 @@ def _get_possible_keys(self): } elif self.partype == "required": self._poss_keys = { - "search": ["page_size"], + "search": ["short_name", "version", "page_size"], "download": [ + "short_name", + "version", "page_size", "page_num", "request_mode", @@ -279,7 +280,6 @@ def _get_possible_keys(self): } elif self.partype == "subset": self._poss_keys = { - "default": [], "spatial": ["bbox", "Boundingshape"], "optional": [ "time", @@ -305,6 +305,7 @@ def _check_valid_keys(self): "An invalid key (" + key + ") was passed. Please remove it using `del`" ) + # DevNote: can check_req_values and check_values be combined? def check_req_values(self): """ Check that all of the required keys have values, if the key was passed in with @@ -333,22 +334,14 @@ def check_values(self): self.partype != "required" ), "You cannot call this function for your parameter type" - default_keys = self.poss_keys["default"] spatial_keys = self.poss_keys["spatial"] - if all(keys in self._fmted_keys.keys() for keys in default_keys): - assert all( - self.fmted_keys.get(key, -9999) != -9999 for key in default_keys + # not the most robust check, but better than nothing... + if any(keys in self._fmted_keys.keys() for keys in spatial_keys): + assert any( + self.fmted_keys.get(key, -9999) != -9999 for key in spatial_keys ), "One of your formatted parameters is missing a value" - - # not the most robust check, but better than nothing... - if any(keys in self._fmted_keys.keys() for keys in spatial_keys): - assert any( - self.fmted_keys.get(key, -9999) != -9999 for key in default_keys - ), "One of your formated parameters is missing a value" - return True - else: - return False + return True else: return False @@ -360,14 +353,19 @@ def build_params(self, **kwargs): Parameters ---------- **kwargs - Keyword inputs containing the needed information to build the parameter list, depending on - parameter type, if the already formatted key:value is not submitted as a kwarg. - May include optional keyword arguments to be passed to the subsetter. Valid keywords - are time, bbox OR Boundingshape, format, projection, projection_parameters, and Coverage.
- - Keyword argument inputs for 'CMR' may include: dataset (data product), version, start, end, extent_type, spatial_extent - Keyword argument inputs for 'required' may include: page_size, page_num, request_mode, include_meta, client_string - Keyword argument inputs for 'subset' may include: geom_filepath, start, end, extent_type, spatial_extent + Keyword inputs containing the needed information to build the parameter list, depending + on parameter type, if the already formatted key:value is not submitted as a kwarg. + May include optional keyword arguments to be passed to the subsetter. + Valid keywords are time, bbox OR Boundingshape, format, projection, + projection_parameters, and Coverage. + + Keyword argument inputs for 'CMR' may include: + start, end, extent_type, spatial_extent + Keyword argument inputs for 'required' may include: + product or short_name, version, page_size, page_num, + request_mode, include_meta, client_string + Keyword argument inputs for 'subset' may include: + geom_filepath, start, end, extent_type, spatial_extent """ @@ -388,8 +386,16 @@ def build_params(self, **kwargs): "include_meta": "Y", "client_string": "icepyx", } + for key in reqkeys: - if key in kwargs: + if key == "short_name": + try: + self._fmted_keys.update({key: kwargs[key]}) + except KeyError: + self._fmted_keys.update({key: kwargs["product"]}) + elif key == "version": + self._fmted_keys.update({key: kwargs["version"]}) + elif key in kwargs: self._fmted_keys.update({key: kwargs[key]}) elif key in defaults: self._fmted_keys.update({key: defaults[key]}) @@ -397,22 +403,12 @@ def build_params(self, **kwargs): pass else: - if self.check_values == True and kwargs == None: + if self.check_values is True and kwargs is None: pass else: - default_keys = self.poss_keys["default"] spatial_keys = self.poss_keys["spatial"] opt_keys = self.poss_keys["optional"] - for key in default_keys: - if key in self._fmted_keys.values(): - assert self._fmted_keys[key] - else: - if key == "short_name": - self._fmted_keys.update({key: kwargs["product"]}) - elif key == "version": - self._fmted_keys.update({key: kwargs["version"]}) - for key in opt_keys: if key == "Coverage" and key in kwargs.keys(): # DevGoal: make there be an option along the lines of Coverage=default, which will get the default variables for that product without the user having to input is2obj.build_wanted_wanted_var_list as their input value for using the Coverage kwarg diff --git a/icepyx/core/granules.py b/icepyx/core/granules.py index a0849968e..5c298c625 100644 --- a/icepyx/core/granules.py +++ b/icepyx/core/granules.py @@ -7,11 +7,11 @@ import numpy as np import os import pprint -import warnings from xml.etree import ElementTree as ET import zipfile import icepyx.core.APIformatting as apifmt +from icepyx.core.auth import EarthdataAuthMixin import icepyx.core.exceptions @@ -37,7 +37,8 @@ def info(grans): # DevNote: could add flag to separate ascending and descending orbits based on ATL03 granule region def gran_IDs(grans, ids=False, cycles=False, tracks=False, dates=False, cloud=False): """ - Returns a list of granule information for each granule dictionary in the input list of granule dictionaries. + Returns a list of granule information for each granule dictionary + in the input list of granule dictionaries. Granule info may be from a list of those available from NSIDC (for ordering/download) or a list of granules present on the file system. 
@@ -71,15 +72,17 @@ producer_granule_id = gran["producer_granule_id"] gran_ids.append(producer_granule_id) - if cloud == True: + if cloud is True: try: for link in gran["links"]: - if link["href"].startswith("s3") and link["href"].endswith(".h5"): + if link["href"].startswith("s3") and link["href"].endswith( + (".h5", ".nc") + ): gran_s3urls.append(link["href"]) except KeyError: pass - if any([param == True for param in [cycles, tracks, dates]]): + if any([param is True for param in [cycles, tracks, dates]]): # PRD: ICESat-2 product # HEM: Sea Ice Hemisphere flag # YY,MM,DD,HH,MN,SS: Year, Month, Day, Hour, Minute, Second @@ -137,7 +140,7 @@ def gran_IDs(grans, ids=False, cycles=False, tracks=False, dates=False, cloud=Fa # DevGoal: this will be a great way/place to manage data from the local file system # where the user already has downloaded data! # DevNote: currently this class is not tested -class Granules: +class Granules(EarthdataAuthMixin): """ Interact with ICESat-2 data granules. This includes finding, ordering, and downloading them as well as (not yet implemented) getting already @@ -155,7 +158,9 @@ def __init__( # files=[], # session=None ): - pass + # initialize authentication properties + EarthdataAuthMixin.__init__(self) + # self.avail = avail # self.orderIDs = orderIDs # self.files = files @@ -200,11 +205,12 @@ def get_avail(self, CMRparams, reqparams, cloud=False): granule_search_url = "https://cmr.earthdata.nasa.gov/search/granules" headers = {"Accept": "application/json", "Client-Id": "icepyx"} - # note we should also check for errors whenever we ping NSIDC-API - make a function to check for errors + # note we should also check for errors whenever we ping NSIDC-API - + # make a function to check for errors params = apifmt.combine_params( CMRparams, - {k: reqparams[k] for k in ["page_size"]}, + {k: reqparams[k] for k in ["short_name", "version", "page_size"]}, {"provider": "NSIDC_CPRD"}, ) @@ -251,7 +257,8 @@ def get_avail(self, CMRparams, reqparams, cloud=False): len(self.avail) > 0 ), "Your search returned no results; try different search parameters" - # DevNote: currently, default subsetting DOES NOT include variable subsetting, only spatial and temporal + # DevNote: currently, default subsetting DOES NOT include variable subsetting, + # only spatial and temporal # DevGoal: add kwargs to allow subsetting and more control over request options. def place_order( self, @@ -260,7 +267,6 @@ def place_order( subsetparams, verbose, subset=True, - session=None, geom_filepath=None, ): # , **kwargs): """ @@ -284,14 +290,12 @@ def place_order( Progress information is automatically printed regardless of the value of verbose. subset : boolean, default True Apply subsetting to the data order from the NSIDC, returning only data that meets the - subset parameters. Spatial and temporal subsetting based on the input parameters happens + subset parameters. + Spatial and temporal subsetting based on the input parameters happens by default when subset=True, but additional subsetting options are available. - Spatial subsetting returns all data that are within the area of interest (but not complete - granules. This eliminates false-positive granules returned by the metadata-level search) - session : requests.session object - A session object authenticating the user to order data using their Earthdata login information.
- The session object will automatically be passed from the query object if you - have successfully logged in there. + Spatial subsetting returns all data that are within the area of interest + (but not complete granules. + This eliminates false-positive granules returned by the metadata-level search) geom_filepath : string, default None String of the full filename and path when the spatial input is a file. @@ -305,11 +309,6 @@ def place_order( query.Query.order_granules """ - if session is None: - raise ValueError( - "Don't forget to log in to Earthdata using query.earthdata_login()" - ) - base_url = "https://n5eil02u.ecs.nsidc.org/egi/request" self.get_avail(CMRparams, reqparams) @@ -345,15 +344,12 @@ def place_order( total_pages, " is submitting to NSIDC", ) - request_params = apifmt.combine_params(CMRparams, reqparams, subsetparams) request_params.update({"page_num": page_num}) - # DevNote: earlier versions of the code used a file upload+post rather than putting the geometries - # into the parameter dictionaries. However, this wasn't working with shapefiles, but this more general - # solution does, so the geospatial parameters are included in the parameter dictionaries. - request = session.get(base_url, params=request_params) + request = self.session.get(base_url, params=request_params) - # DevGoal: use the request response/number to do some error handling/give the user better messaging for failures + # DevGoal: use the request response/number to do some error handling/ + # give the user better messaging for failures # print(request.content) root = ET.fromstring(request.content) # print([subset_agent.attrib for subset_agent in root.iter('SubsetAgent')]) @@ -387,7 +383,7 @@ def place_order( print("status URL: ", statusURL) # Find order status - request_response = session.get(statusURL) + request_response = self.session.get(statusURL) if verbose is True: print( "HTTP response from order response URL: ", @@ -412,7 +408,7 @@ def place_order( ) # print('Status is not complete. Trying again') time.sleep(10) - loop_response = session.get(statusURL) + loop_response = self.session.get(statusURL) # Raise bad request: Loop will stop for bad response code. loop_response.raise_for_status() @@ -452,10 +448,10 @@ def place_order( else: print("Request failed.") - # DevGoal: save orderIDs more frequently than just at the end for large orders (e.g. for len(reqparams['page_num']) > 5 or 10 or something) + # DevGoal: save orderIDs more frequently than just at the end for large orders + # (e.g. for len(reqparams['page_num']) > 5 or 10 or something) # Save orderIDs to file to avoid resubmitting order in case kernel breaks down. # save orderIDs for every 5 orders when more than 10 orders are submitted. - # DevNote: These numbers are hard coded for now. Consider to allow user to set them in future? if reqparams["page_num"] >= 10: with open(order_fn, "w") as fid: json.dump({"orderIDs": self.orderIDs}, fid) @@ -466,7 +462,7 @@ def place_order( return self.orderIDs - def download(self, verbose, path, session=None, restart=False): + def download(self, verbose, path, restart=False): """ Downloads the data for the object's orderIDs, which are generated by ordering data from the NSIDC. @@ -478,13 +474,10 @@ def download(self, verbose, path, session=None, restart=False): Progress information is automatically printed regardless of the value of verbose. path : string String with complete path to desired download directory and location. 
- session : requests.session object - A session object authenticating the user to download data using their Earthdata login information. - The session object will automatically be passed from the query object if you - have successfully logged in there. restart : boolean, default False - Restart your download if it has been interrupted. If the kernel has been restarted, but you successfully - completed your order, you will need to re-initialize your query class object and log in to Earthdata + Restart your download if it has been interrupted. + If the kernel has been restarted, but you successfully + completed your order, you will need to re-initialize your query class object and can then skip immediately to the download_granules method with restart=True. Notes @@ -501,14 +494,8 @@ def download(self, verbose, path, session=None, restart=False): Unzip the downloaded granules. """ - # Note: need to test these checks still - if session is None: - raise ValueError( - "Don't forget to log in to Earthdata using query.earthdata_login()" - ) - # DevGoal: make this a more robust check for an active session - - # DevNote: this will replace any existing orderIDs with the saved list (could create confusion depending on whether download was interrupted or kernel restarted) + # DevNote: this will replace any existing orderIDs with the saved list + # (could create confusion depending on whether download was interrupted or kernel restarted) order_fn = ".order_restart" if os.path.exists(order_fn): with open(order_fn, "r") as fid: @@ -520,7 +507,8 @@ def download(self, verbose, path, session=None, restart=False): "Please confirm that you have submitted a valid order and it has successfully completed." ) - # DevNote: Temporary. Hard code the orderID info files here. order_fn should be consistent with place_order. + # DevNote: Temporary. Hard code the orderID info files here. + # order_fn should be consistent with place_order. downid_fn = ".download_ID" @@ -543,7 +531,7 @@ def download(self, verbose, path, session=None, restart=False): print("Beginning download of zipped output...") try: - zip_response = session.get(downloadURL) + zip_response = self.session.get(downloadURL) # Raise bad request: Loop will stop for bad response code. zip_response.raise_for_status() print( @@ -557,7 +545,8 @@ def download(self, verbose, path, session=None, restart=False): print( "Unable to download ", order, ". Check granule order for messages." ) - # DevGoal: move this option back out to the is2class level and implement it in an alternate way? + # DevGoal: move this option back out to the is2class level + # and implement it in an alternate way? # #Note: extract the data to save it locally else: with zipfile.ZipFile(io.BytesIO(zip_response.content)) as z: diff --git a/icepyx/core/is2ref.py b/icepyx/core/is2ref.py index c51c631be..361221d6a 100644 --- a/icepyx/core/is2ref.py +++ b/icepyx/core/is2ref.py @@ -15,7 +15,10 @@ def _validate_product(product): """ Confirm a valid ICESat-2 product was specified """ - error_msg = "A valid product string was not provided. Check user input, if given, or file metadata." + error_msg = ( + "A valid product string was not provided. " + "Check user input, if given, or file metadata." 
+ ) if isinstance(product, str): product = str.upper(product) assert product in [ @@ -378,8 +381,10 @@ def extract_product(filepath, auth=None): # ATL14 saves the short_name as an array ['ATL14'] product = product[0] product = _validate_product(product) - except KeyError: - raise "Unable to parse the product name from file metadata" + except KeyError as e: + raise Exception( + "Unable to parse the product name from file metadata" + ).with_traceback(e.__traceback__) # Close the file reader f.close() @@ -421,8 +426,21 @@ def extract_version(filepath, auth=None): if isinstance(version, bytes): version = version.decode() - except KeyError: - raise "Unable to parse the version from file metadata" + except KeyError as e: + raise Exception( + "Unable to parse the version from file metadata" + ).with_traceback(e.__traceback__) + + # catch cases where the version number is an invalid string + # e.g. a VersionID of "SET_BY_PGE", causing issues where version needs to be a valid number + try: + float(version) + except ValueError: + raise Exception( + "There is an underlying issue with the version information " + "provided in the metadata of this file. " + "Consider setting the version manually for further processing." + ) # Close the file reader f.close() diff --git a/icepyx/core/query.py b/icepyx/core/query.py index 25f13d5b6..46a306dd2 100644 --- a/icepyx/core/query.py +++ b/icepyx/core/query.py @@ -532,15 +532,14 @@ def tracks(self): @property def CMRparams(self): """ - Display the CMR key:value pairs that will be submitted. It generates the dictionary if it does not already exist. + Display the CMR key:value pairs that will be submitted. + It generates the dictionary if it does not already exist. Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.CMRparams - {'short_name': 'ATL06', - 'version': '006', - 'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', + {'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', 'bounding_box': '-55.0,68.0,-48.0,71.0'} """ @@ -552,7 +551,7 @@ def CMRparams(self): # dictionary of optional CMR parameters kwargs = {} # temporal CMR parameters - if hasattr(self, "_temporal"): + if hasattr(self, "_temporal") and self.product != "ATL11": kwargs["start"] = self._temporal._start kwargs["end"] = self._temporal._end # granule name CMR parameters (orbital or file name) @@ -564,8 +563,6 @@ def CMRparams(self): if self._CMRparams.fmted_keys == {}: self._CMRparams.build_params( - product=self.product, - version=self._version, extent_type=self._spatial._ext_type, spatial_extent=self._spatial.fmt_for_CMR(), **kwargs, @@ -583,22 +580,23 @@ def reqparams(self): -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.reqparams - {'page_size': 2000} + {'short_name': 'ATL06', 'version': '006', 'page_size': 2000} >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP >>> reg_a.order_granules() # doctest: +SKIP >>> reg_a.reqparams # doctest: +SKIP - {'page_size': 2000, 'page_num': 1, 'request_mode': 'async', 'include_meta': 'Y', 'client_string': 'icepyx'} + {'short_name': 'ATL06', 'version': '006', 'page_size': 2000, 'page_num': 1, 'request_mode': 'async', 'include_meta': 'Y', 'client_string': 'icepyx'} """ if not hasattr(self, "_reqparams"): self._reqparams = apifmt.Parameters("required", reqtype="search") - self._reqparams.build_params() + self._reqparams.build_params(product=self.product, version=self._version) return self._reqparams.fmted_keys #
@property - # DevQuestion: if I make this a property, I get a "dict" object is not callable when I try to give input kwargs... what approach should I be taking? + # DevQuestion: if I make this a property, I get a "dict" object is not callable + # when I try to give input kwargs... what approach should I be taking? def subsetparams(self, **kwargs): """ Display the subsetting key:value pairs that will be submitted. @@ -629,7 +627,7 @@ def subsetparams(self, **kwargs): self._subsetparams = apifmt.Parameters("subset") # temporal subsetting parameters - if hasattr(self, "temporal"): + if hasattr(self, "_temporal") and self.product != "ATL11": kwargs["start"] = self._temporal._start kwargs["end"] = self._temporal._end @@ -1031,14 +1029,13 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): "NSIDC only allows ordering of one granule by name at a time; your orders will be placed accordingly." ) for gran in gran_name_list: - tempCMRparams.update({"readable_granule_name[]": gran}) + tempCMRparams["readable_granule_name[]"] = gran self._granules.place_order( tempCMRparams, self.reqparams, self.subsetparams(**kwargs), verbose, subset, - session=self.session, geom_filepath=self._spatial._geom_file, ) @@ -1049,7 +1046,6 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): self.subsetparams(**kwargs), verbose, subset, - session=self.session, geom_filepath=self._spatial._geom_file, ) @@ -1114,7 +1110,7 @@ def download_granules( ): self.order_granules(verbose=verbose, subset=subset, **kwargs) - self._granules.download(verbose, path, session=self.session, restart=restart) + self._granules.download(verbose, path, restart=restart) # DevGoal: add testing? What do we test, and how, given this is a visualization. # DevGoal(long term): modify this to accept additional inputs, etc. diff --git a/icepyx/core/read.py b/icepyx/core/read.py index e11015935..2e4e03ade 100644 --- a/icepyx/core/read.py +++ b/icepyx/core/read.py @@ -1,4 +1,3 @@ -import fnmatch import glob import os import sys @@ -36,19 +35,21 @@ def _make_np_datetime(df, keyword): Example ------- - >>> ds = xr.Dataset({"time": ("time_idx", [b'2019-01-11T05:26:31.323722Z'])}, coords={"time_idx": [0]}) + >>> ds = xr.Dataset({"time": ("time_idx", [b'2019-01-11T05:26:31.323722Z'])}, + ... coords={"time_idx": [0]}) >>> _make_np_datetime(ds, "time") - <xarray.Dataset> + <xarray.Dataset> Size: 16B Dimensions: (time_idx: 1) Coordinates: - * time_idx (time_idx) int64 0 + * time_idx (time_idx) int64 8B 0 Data variables: - time (time_idx) datetime64[ns] 2019-01-11T05:26:31.323722 + time (time_idx) datetime64[ns] 8B 2019-01-11T05:26:31.323722 """ if df[keyword].str.endswith("Z"): - # manually remove 'Z' from datetime to allow conversion to np.datetime64 object (support for timezones is deprecated and causes a seg fault) + # manually remove 'Z' from datetime to allow conversion to np.datetime64 object + # (support for timezones is deprecated and causes a seg fault) df.update({keyword: df[keyword].str[:-1].astype(np.datetime64)}) else: @@ -100,165 +101,51 @@ def _get_track_type_str(grp_path) -> (str, str, str): return track_str, spot_dim_name, spot_var_name -# Dev note: function fully tested (except else, which don't know how to get to) -def _check_datasource(filepath): +def _parse_source(data_source, glob_kwargs={}) -> list: """ - Determine if the input is from a local system or is an s3 bucket.
- Then, validate the inputs (for those on the local system; s3 sources are not validated currently) - """ - - from pathlib import Path - - import fsspec - from fsspec.implementations.local import LocalFileSystem - - source_types = ["is2_local", "is2_s3"] - - if not isinstance(filepath, Path) and not isinstance(filepath, str): - raise TypeError("filepath must be a string or Path") - - fsmap = fsspec.get_mapper(str(filepath)) - output_fs = fsmap.fs - - if "s3" in output_fs.protocol: - return source_types[1] - elif isinstance(output_fs, LocalFileSystem): - assert _validate_source(filepath) - return source_types[0] - else: - raise ValueError("Could not confirm the datasource type.") - - """ - Could also use: os.path.splitext(f.name)[1].lower() to get file extension - - If ultimately want to handle mixed types, save the valid paths in a dict with "s3" or "local" as the keys and the list of the files as the values. - Then the dict can also contain a catalog key with a dict of catalogs for each of those types of inputs ("s3" or "local") - In general, the issue we'll run into with multiple files is going to be merging during the read in, - so it could be beneficial to not hide this too much and mandate users handle this intentionally outside the read in itself. - - this function was derived with some of the following resources, based on echopype - https://github.com/OSOceanAcoustics/echopype/blob/ab5128fb8580f135d875580f0469e5fba3193b84/echopype/utils/io.py - - https://filesystem-spec.readthedocs.io/en/latest/api.html?highlight=get_map#fsspec.spec.AbstractFileSystem.glob - - https://filesystem-spec.readthedocs.io/en/latest/_modules/fsspec/implementations/local.html - - https://github.com/OSOceanAcoustics/echopype/blob/ab5128fb8580f135d875580f0469e5fba3193b84/echopype/convert/api.py#L380 - - https://echopype.readthedocs.io/en/stable/convert.html - """ - + Parse the user's data_source input based on type. -# Dev note: function fully tested as currently written -def _validate_source(source): - """ - Check that the entered data source paths on the local file system are valid - - Currently, s3 data source paths are not validated. - """ - - # acceptable inputs (for now) are a single file or directory - # would ultimately like to make a Path (from pathlib import Path; isinstance(source, Path)) an option - # see https://github.com/OSOceanAcoustics/echopype/blob/ab5128fb8580f135d875580f0469e5fba3193b84/echopype/utils/io.py#L82 - assert isinstance(source, str), "You must enter your input as a string." - assert ( - os.path.isdir(source) is True or os.path.isfile(source) is True - ), "Your data source string is not a valid data source." - return True - - -# Dev Note: function is tested (at least loosely) -def _run_fast_scandir(dir, fn_glob): - """ - Quickly scan nested directories to get a list of filenames that match the fn_glob string. - Modified from https://stackoverflow.com/a/59803793/2441026 - (faster than os.walk or glob methods, and allows filename matching in subdirectories). 
-
-    Parameters
-    ----------
-    dir : str
-        full path to the input directory
-
-    fn_glob : str
-        glob-style filename pattern
-
-    Outputs
+    Returns
     -------
-    subfolders : list
-        list of strings of all nested subdirectories
-
-    files : list
-        list of strings containing full paths to each file matching the filename pattern
+    filelist : list of str
+        List of granule filenames to be read in
     """

-    subfolders, files = [], []
-
-    for f in os.scandir(dir):
-        if any(f.name.startswith(s) for s in ["__", "."]):
-            continue
-        if f.is_dir():
-            subfolders.append(f.path)
-        if f.is_file():
-            if fnmatch.fnmatch(f.name, fn_glob):
-                files.append(f.path)
-
-    for dir in list(subfolders):
-        sf, f = _run_fast_scandir(dir, fn_glob)
-        subfolders.extend(sf)
-        files.extend(f)
-
-    return subfolders, files
-
-
-# Need to post on intake's page to see if this would be a useful contribution...
-# https://github.com/intake/intake/blob/0.6.4/intake/source/utils.py#L216
-def _pattern_to_glob(pattern):
-    """
-    Adapted from intake.source.utils.path_to_glob to convert a path as pattern into a glob style path
-    that uses the pattern's indicated number of '?' instead of '*' where an int was specified.
-
-    Returns pattern if pattern is not a string.
-
-    Parameters
-    ----------
-    pattern : str
-        Path as pattern optionally containing format_strings
+    from pathlib import Path

-    Returns
-    -------
-    glob_path : str
-        Path with int format strings replaced with the proper number of '?' and '*' otherwise.
+    if isinstance(data_source, list):
+        # every list element must be a str or Path
+        # (all() is needed here; asserting on a non-empty list is always True)
+        assert all(isinstance(f, (str, Path)) for f in data_source)
+        # if data_source is a list pass that directly to _filelist
+        filelist = data_source
+    elif os.path.isdir(data_source):
+        # if data_source is a directory glob search the directory and assign to _filelist
+        data_source = os.path.join(data_source, "*")
+        filelist = glob.glob(data_source, **glob_kwargs)
+    elif isinstance(data_source, str) or isinstance(data_source, Path):
+        # str() makes this check safe for pathlib.Path inputs, which have no startswith method
+        if str(data_source).startswith("s3"):
+            # if the string is an s3 path put it in the _filelist without globbing
+            filelist = [data_source]
+        else:
+            # data_source is a globable string
+            filelist = glob.glob(data_source, **glob_kwargs)
+    else:
+        raise TypeError(
+            "data_source should be a list of files, a directory, the path to a file, "
+            "or a glob string."
+        )

-    Examples
-    --------
-    >>> _pattern_to_glob('{year}/{month}/{day}.csv')
-    '*/*/*.csv'
-    >>> _pattern_to_glob('{year:4}/{month:2}/{day:2}.csv')
-    '????/??/??.csv'
-    >>> _pattern_to_glob('data/{year:4}{month:02}{day:02}.csv')
-    'data/????????.csv'
-    >>> _pattern_to_glob('data/*.csv')
-    'data/*.csv'
-    """
-    from string import Formatter
+    # Remove any directories from the list (these get generated during recursive
+    # glob search)
+    filelist = [f for f in filelist if not os.path.isdir(f)]

-    if not isinstance(pattern, str):
-        return pattern
+    # Make sure a non-zero number of files were found
+    if len(filelist) == 0:
+        raise KeyError(
+            "No files found matching the specified `data_source`. Check your glob "
+            "string or file list."
+        )

-    fmt = Formatter()
-    glob_path = ""
-    # prev_field_name = None
-    for literal_text, field_name, format_specs, _ in fmt.parse(format_string=pattern):
-        glob_path += literal_text
-        if field_name and (glob_path != "*"):
-            try:
-                glob_path += "?" * int(format_specs)
-            except ValueError:
-                glob_path += "*"
-            # alternatively, you could use bits=utils._get_parts_of_format_string(resolved_string, literal_texts, format_specs)
-            # and then use len(bits[i]) to get the length of each format_spec
-    # print(glob_path)
-    return glob_path
+    return filelist


 def _confirm_proceed():
@@ -282,8 +169,8 @@ class Read(EarthdataAuthMixin):

     Parameters
     ----------
-    data_source : string, List
-        A string or list which specifies the files to be read.
+    data_source : string, Path, List
+        A string, pathlib.Path object, or list which specifies the files to be read.
         The string can be either: 1) the path of a single file 2) the path to a directory or
         3) a [glob string](https://docs.python.org/3/library/glob.html).
         The List must be a list of strings, each of which is the path of a single file.

     glob_kwargs : dict, default {}
-        Additional arguments to be passed into the [glob.glob()](https://docs.python.org/3/library/glob.html#glob.glob)function
+        Additional arguments to be passed into the
+        [glob.glob()](https://docs.python.org/3/library/glob.html#glob.glob) function

     out_obj_type : object, default xarray.Dataset
         The desired format for the data to be read in.
@@ -326,7 +214,8 @@ class Read(EarthdataAuthMixin):
     Reading all files in a directory

     >>> ipx.Read('/path/to/data/') # doctest: +SKIP

-    Reading files that match a particular pattern (here, all .h5 files that start with `processed_ATL06_`).
+    Reading files that match a particular pattern
+    (here, all .h5 files that start with `processed_ATL06_`).
     >>> ipx.Read('/path/to/data/processed_ATL06_*.h5') # doctest: +SKIP

     Reading a specific list of files
@@ -370,29 +259,7 @@ def __init__(
             "Please use the `data_source` argument to specify your dataset instead."
         )

-        if isinstance(data_source, list):
-            # if data_source is a list pass that directly to _filelist
-            self._filelist = data_source
-        elif os.path.isdir(data_source):
-            # if data_source is a directory glob search the directory and assign to _filelist
-            data_source = os.path.join(data_source, "*")
-            self._filelist = glob.glob(data_source, **glob_kwargs)
-        elif isinstance(data_source, str):
-            if data_source.startswith("s3"):
-                # if the string is an s3 path put it in the _filelist without globbing
-                self._filelist = [data_source]
-            else:
-                # data_source is a globable string
-                self._filelist = glob.glob(data_source, **glob_kwargs)
-        else:
-            raise TypeError(
-                "data_source should be a list of files, a directory, the path to a file, "
-                "or a glob string."
-            )
-
-        # Remove any directories from the list (these get generated during recursive
-        # glob search)
-        self._filelist = [f for f in self._filelist if not os.path.isdir(f)]
+        self._filelist = _parse_source(data_source, glob_kwargs)

         # Create a dictionary of the products as read from the metadata
         product_dict = {}
@@ -423,7 +290,7 @@ def __init__(
             )
             _confirm_proceed()

-        # Raise warnings or errors for multiple products or products not matching the user-specified product
+        # Raise error if multiple products given
         all_products = list(set(product_dict.values()))
         if len(all_products) > 1:
             raise TypeError(
@@ -431,14 +298,9 @@
                 "Please provide a valid `data_source` parameter indicating files of a single "
                 "product"
             )
-        elif len(all_products) == 0:
-            raise TypeError(
-                "No files found matching the specified `data_source`. Check your glob "
-                "string or file list."
-            )
-        else:
-            # Assign the identified product to the property
-            self._product = all_products[0]
+
+        # Assign the identified product to the property
+        self._product = all_products[0]

         if out_obj_type is not None:
             print(
@@ -454,7 +316,8 @@ def __init__(
     def vars(self):
         """
         Return the variables object associated with the data being read in.
-        This instance is generated from the source file or first file in a list of input files (when source is a directory).
+        This instance is generated from the source file or first file in a list of input files
+        (when source is a directory).

         See Also
         --------
@@ -467,6 +330,17 @@ def vars(self):

         """

+        # fix to handle the fact that some VersionID metadata is wrong
+        # see: https://forum.earthdata.nasa.gov/viewtopic.php?t=5154
+        # (v006, v003, v003, respectively)
+        # Note that this results in a login being required even for a local file
+        # because otherwise Variables (variables.py) tries to get the version from the file (ln99).
+        bad_metadata = ["ATL11", "ATL14", "ATL15"]
+        if self._product in bad_metadata and not hasattr(self, "_read_vars"):
+            self._read_vars = Variables(
+                product=self._product, version=is2ref.latest_version(self._product)
+            )
+
         if not hasattr(self, "_read_vars"):
             self._read_vars = Variables(path=self.filelist[0])
         return self._read_vars
@@ -507,7 +381,8 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
         the second list contains the second portion of the group name, etc.
         "none" is used to fill in where paths are shorter than the longest path.
     wanted_dict : dict
-        Dictionary with variable names as keys and a list of group + variable paths containing those variables as values.
+        Dictionary with variable names as keys and a list of group +
+        variable paths containing those variables as values.

     Returns
     -------
@@ -600,7 +475,8 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict):
             )
         )

-    # for the subgoups where there is 1d delta time data, make sure that the cycle number is still a coordinate for merging
+    # for the subgroups where there is 1d delta time data,
+    # make sure that the cycle number is still a coordinate for merging
     try:
         ds = ds.assign_coords(
             {
@@ -643,14 +519,16 @@ def _combine_nested_vars(is2ds, ds, grp_path, wanted_dict):
     grp_path : str
         hdf5 group path read into ds
     wanted_dict : dict
-        Dictionary with variable names as keys and a list of group + variable paths containing those variables as values.
+        Dictionary with variable names as keys and a list of group +
+        variable paths containing those variables as values.

     Returns
     -------
     Xarray Dataset with variables from the ds variable group added.
     """
-    # Dev Goal: improve this type of iterating to minimize amount of looping required. Would a path handling library be useful here?
+    # Dev Goal: improve this type of iterating to minimize amount of looping required.
+    # Would a path handling library be useful here?
    grp_spec_vars = [
        k for k, v in wanted_dict.items() if any(f"{grp_path}/{k}" in x for x in v)
    ]
@@ -682,7 +560,8 @@ def _combine_nested_vars(is2ds, ds, grp_path, wanted_dict):

     def load(self):
         """
-        Create a single Xarray Dataset containing the data from one or more files and/or ground tracks.
+        Create a single Xarray Dataset containing the data from one or more
+        files and/or ground tracks.

         Uses icepyx's ICESat-2 data product awareness and Xarray's `combine_by_coords` function.
         All items in the wanted variables list will be loaded from the files into memory.
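The read.py hunks above funnel every form of `data_source` through the new `_parse_source` helper before product detection runs. A minimal usage sketch of the input forms the helper accepts (the file paths below are hypothetical, and the comments reflect the dispatch logic added above, not additional documented behavior):

    from pathlib import Path
    import icepyx as ipx

    # a directory: globbed internally as '/path/to/data/*'
    reader = ipx.Read('/path/to/data/')
    # a glob string: passed straight to glob.glob
    reader = ipx.Read('/path/to/data/processed_ATL06_*.h5')
    # a single file, as a str or a pathlib.Path
    reader = ipx.Read(Path('/path/to/data/processed_ATL06_granule.h5'))
    # an explicit list of files (each entry a str or Path)
    reader = ipx.Read(['/path/to/data/a.h5', '/path/to/data/b.h5'])
    # an s3 path: stored as-is, never globbed
    reader = ipx.Read('s3://hypothetical-bucket/processed_ATL06_granule.h5')
    # glob_kwargs is forwarded to glob.glob, e.g. to enable recursive matching
    reader = ipx.Read('/path/to/data/**/*.h5', glob_kwargs={'recursive': True})

Because `_parse_source` raises as soon as zero files match, the former `len(all_products) == 0` branch in `Read.__init__` became unreachable, which is why the hunk above deletes it.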
@@ -753,8 +632,10 @@ def load(self): all_dss.append( self._build_single_file_dataset(file, groups_list) ) # wanted_groups, vgrp.keys())) - if isinstance(file, S3File): - file.close() + + # Closing the file prevents further operations on the dataset + # if isinstance(file, S3File): + # file.close() if len(all_dss) == 1: return all_dss[0] @@ -778,8 +659,6 @@ def _build_dataset_template(self, file): It may be possible to expand this function to provide multiple templates. """ - # NOTE: use the hdf5 library to grab the attr for the product specifier - # can ultimately then use it to check against user specified one or merge strategies (or to return a list of ds) is2ds = xr.Dataset( coords=dict( @@ -798,7 +677,8 @@ def _read_single_grp(self, file, grp_path): ---------- file : str Full path to ICESat-2 data file. - Currently tested for locally downloaded files; untested but hopefully works for s3 stored cloud files. + Currently tested for locally downloaded files; + untested but hopefully works for s3 stored cloud files. grp_path : str Full string to a variable group. E.g. 'gt1l/land_ice_segments' @@ -818,25 +698,35 @@ def _read_single_grp(self, file, grp_path): def _build_single_file_dataset(self, file, groups_list): """ - Create a single xarray dataset with all of the wanted variables/groups from the wanted var list for a single data file/url. + Create a single xarray dataset with all of the wanted variables/groups + from the wanted var list for a single data file/url. Parameters ---------- file : str Full path to ICESat-2 data file. - Currently tested for locally downloaded files; untested but hopefully works for s3 stored cloud files. + Currently tested for locally downloaded files; + untested but hopefully works for s3 stored cloud files. groups_list : list of strings List of full paths to data variables within the file. - e.g. ['orbit_info/sc_orient', 'gt1l/land_ice_segments/h_li', 'gt1l/land_ice_segments/latitude', 'gt1l/land_ice_segments/longitude'] + e.g. 
['orbit_info/sc_orient', 'gt1l/land_ice_segments/h_li', + 'gt1l/land_ice_segments/latitude', 'gt1l/land_ice_segments/longitude'] Returns ------- Xarray Dataset """ - # DEVNOTE: if and elif does not actually apply wanted variable list, and has not been tested for merging multiple files into one ds - # if a gridded product + # returns wanted groups as a list of lists with group path string elements separated + _, wanted_groups_tiered = Variables.parse_var_list( + groups_list, tiered=True, tiered_vars=True + ) + + # DEVNOTE: elif does not actually apply wanted variable list, + # and has not been tested for merging multiple files into one ds + # of a gridded product # TODO: all products need to be tested, and quicklook products added or explicitly excluded + # consider looking for netcdf file extension instead of using product # Level 3b, gridded (netcdf): ATL14, 15, 16, 17, 18, 19, 20, 21 if self.product in [ "ATL14", @@ -849,7 +739,22 @@ def _build_single_file_dataset(self, file, groups_list): "ATL21", "ATL23", ]: - is2ds = xr.open_dataset(file) + wanted_grouponly_set = set(wanted_groups_tiered[0]) + wanted_groups_list = list(sorted(wanted_grouponly_set)) + if len(wanted_groups_list) == 1: + is2ds = self._read_single_grp(file, grp_path=wanted_groups_list[0]) + else: + is2ds = self._build_dataset_template(file) + while wanted_groups_list: + ds = self._read_single_grp(file, grp_path=wanted_groups_list[0]) + wanted_groups_list = wanted_groups_list[1:] + is2ds = is2ds.merge( + ds, join="outer", combine_attrs="drop_conflicts" + ) + if hasattr(is2ds, "description"): + is2ds.attrs["description"] = ( + "Group-level data descriptions were removed during Dataset creation." + ) # Level 3b, hdf5: ATL11 elif self.product in ["ATL11"]: @@ -861,17 +766,13 @@ def _build_single_file_dataset(self, file, groups_list): ) wanted_groups_set = set(wanted_groups) - # orbit_info is used automatically as the first group path so the info is available for the rest of the groups + # orbit_info is used automatically as the first group path + # so the info is available for the rest of the groups # wanted_groups_set.remove("orbit_info") wanted_groups_set.remove("ancillary_data") # Note: the sorting is critical for datasets with highly nested groups wanted_groups_list = ["ancillary_data"] + sorted(wanted_groups_set) - # returns the wanted groups as a list of lists with group path string elements separated - _, wanted_groups_tiered = Variables.parse_var_list( - groups_list, tiered=True, tiered_vars=True - ) - while wanted_groups_list: # print(wanted_groups_list) grp_path = wanted_groups_list[0] @@ -892,17 +793,14 @@ def _build_single_file_dataset(self, file, groups_list): groups_list, tiered=False ) wanted_groups_set = set(wanted_groups) - # orbit_info is used automatically as the first group path so the info is available for the rest of the groups + # orbit_info is used automatically as the first group path + # so the info is available for the rest of the groups wanted_groups_set.remove("orbit_info") wanted_groups_set.remove("ancillary_data") # Note: the sorting is critical for datasets with highly nested groups wanted_groups_list = ["orbit_info", "ancillary_data"] + sorted( wanted_groups_set ) - # returns the wanted groups as a list of lists with group path string elements separated - _, wanted_groups_tiered = Variables.parse_var_list( - groups_list, tiered=True, tiered_vars=True - ) while wanted_groups_list: grp_path = wanted_groups_list[0] @@ -912,7 +810,8 @@ def _build_single_file_dataset(self, file, groups_list): 
is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict ) - # if there are any deeper nested variables, get those so they have actual coordinates and add them + # if there are any deeper nested variables, + # get those so they have actual coordinates and add them # this may apply to (at a minimum): ATL08 if any(grp_path in grp_path2 for grp_path2 in wanted_groups_list): for grp_path2 in wanted_groups_list: diff --git a/icepyx/core/visualization.py b/icepyx/core/visualization.py index 001ae178e..edc10d66d 100644 --- a/icepyx/core/visualization.py +++ b/icepyx/core/visualization.py @@ -1,9 +1,8 @@ """ Interactive visualization of spatial extent and ICESat-2 elevations """ + import concurrent.futures -import datetime -import re import warnings import backoff diff --git a/icepyx/quest/dataset_scripts/dataset.py b/icepyx/quest/dataset_scripts/dataset.py index 193fab22e..7637b5d4f 100644 --- a/icepyx/quest/dataset_scripts/dataset.py +++ b/icepyx/quest/dataset_scripts/dataset.py @@ -1,11 +1,9 @@ import warnings -from icepyx.core.query import GenQuery warnings.filterwarnings("ignore") class DataSet: - """ Template parent class for all QUEST supported datasets (i.e. ICESat-2, Argo BGC, Argo, MODIS, etc.). All sub-classes must support the following methods for use via the QUEST class. diff --git a/icepyx/tests/ATL06v06_options.json b/icepyx/tests/ATL06v06_options.json index 7851446be..46f661f0b 100644 --- a/icepyx/tests/ATL06v06_options.json +++ b/icepyx/tests/ATL06v06_options.json @@ -1 +1 @@ -{"options": [{"id": "ICESAT2", "spatialSubsetting": "true", "spatialSubsettingShapefile": "true", "temporalSubsetting": "true", "type": "both", "maxGransSyncRequest": "100", "maxGransAsyncRequest": "2000"}], "fileformats": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile"], "reprojectionONLY": [], "noproj": [], "formatreproj": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile"], "variables": ["ancillary_data/atlas_sdp_gps_epoch", "ancillary_data/control", "ancillary_data/data_end_utc", "ancillary_data/data_start_utc", "ancillary_data/end_cycle", "ancillary_data/end_delta_time", "ancillary_data/end_geoseg", "ancillary_data/end_gpssow", "ancillary_data/end_gpsweek", "ancillary_data/end_orbit", "ancillary_data/end_region", "ancillary_data/end_rgt", "ancillary_data/granule_end_utc", "ancillary_data/granule_start_utc", "ancillary_data/qa_at_interval", "ancillary_data/release", "ancillary_data/start_cycle", "ancillary_data/start_delta_time", "ancillary_data/start_geoseg", "ancillary_data/start_gpssow", "ancillary_data/start_gpsweek", "ancillary_data/start_orbit", "ancillary_data/start_region", "ancillary_data/start_rgt", "ancillary_data/version", "ancillary_data/land_ice/dt_hist", "ancillary_data/land_ice/fit_maxiter", "ancillary_data/land_ice/fpb_maxiter", "ancillary_data/land_ice/max_res_ids", "ancillary_data/land_ice/min_dist", "ancillary_data/land_ice/min_gain_th", "ancillary_data/land_ice/min_n_pe", "ancillary_data/land_ice/min_n_sel", "ancillary_data/land_ice/min_signal_conf", "ancillary_data/land_ice/n_hist", "ancillary_data/land_ice/nhist_bins", "ancillary_data/land_ice/n_sigmas", "ancillary_data/land_ice/proc_interval", "ancillary_data/land_ice/qs_lim_bsc", "ancillary_data/land_ice/qs_lim_hrs", "ancillary_data/land_ice/qs_lim_hsigma", "ancillary_data/land_ice/qs_lim_msw", "ancillary_data/land_ice/qs_lim_snr", "ancillary_data/land_ice/qs_lim_sss", "ancillary_data/land_ice/rbin_width", "ancillary_data/land_ice/sigma_beam", "ancillary_data/land_ice/sigma_tx", "ancillary_data/land_ice/t_dead", 
"ancillary_data/land_ice/txp_maxiter", "gt1l/land_ice_segments/atl06_quality_summary", "gt1l/land_ice_segments/delta_time", "gt1l/land_ice_segments/h_li", "gt1l/land_ice_segments/h_li_sigma", "gt1l/land_ice_segments/latitude", "gt1l/land_ice_segments/longitude", "gt1l/land_ice_segments/segment_id", "gt1l/land_ice_segments/sigma_geo_h", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_med_corr", "gt1l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_n_corr", "gt1l/land_ice_segments/bias_correction/med_r_fit", "gt1l/land_ice_segments/bias_correction/tx_mean_corr", "gt1l/land_ice_segments/bias_correction/tx_med_corr", "gt1l/land_ice_segments/dem/dem_flag", "gt1l/land_ice_segments/dem/dem_h", "gt1l/land_ice_segments/dem/geoid_free2mean", "gt1l/land_ice_segments/dem/geoid_h", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1l/land_ice_segments/fit_statistics/dh_fit_dy", "gt1l/land_ice_segments/fit_statistics/h_expected_rms", "gt1l/land_ice_segments/fit_statistics/h_mean", "gt1l/land_ice_segments/fit_statistics/h_rms_misfit", "gt1l/land_ice_segments/fit_statistics/h_robust_sprd", "gt1l/land_ice_segments/fit_statistics/n_fit_photons", "gt1l/land_ice_segments/fit_statistics/n_seg_pulses", "gt1l/land_ice_segments/fit_statistics/sigma_h_mean", "gt1l/land_ice_segments/fit_statistics/signal_selection_source", "gt1l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1l/land_ice_segments/fit_statistics/snr", "gt1l/land_ice_segments/fit_statistics/snr_significance", "gt1l/land_ice_segments/fit_statistics/w_surface_window_final", "gt1l/land_ice_segments/geophysical/bckgrd", "gt1l/land_ice_segments/geophysical/bsnow_conf", "gt1l/land_ice_segments/geophysical/bsnow_h", "gt1l/land_ice_segments/geophysical/bsnow_od", "gt1l/land_ice_segments/geophysical/cloud_flg_asr", "gt1l/land_ice_segments/geophysical/cloud_flg_atm", "gt1l/land_ice_segments/geophysical/dac", "gt1l/land_ice_segments/geophysical/e_bckgrd", "gt1l/land_ice_segments/geophysical/layer_flag", "gt1l/land_ice_segments/geophysical/msw_flag", "gt1l/land_ice_segments/geophysical/neutat_delay_total", "gt1l/land_ice_segments/geophysical/r_eff", "gt1l/land_ice_segments/geophysical/solar_azimuth", "gt1l/land_ice_segments/geophysical/solar_elevation", "gt1l/land_ice_segments/geophysical/tide_earth", "gt1l/land_ice_segments/geophysical/tide_earth_free2mean", "gt1l/land_ice_segments/geophysical/tide_equilibrium", "gt1l/land_ice_segments/geophysical/tide_load", "gt1l/land_ice_segments/geophysical/tide_ocean", "gt1l/land_ice_segments/geophysical/tide_pole", "gt1l/land_ice_segments/ground_track/ref_azimuth", "gt1l/land_ice_segments/ground_track/ref_coelv", "gt1l/land_ice_segments/ground_track/seg_azimuth", "gt1l/land_ice_segments/ground_track/sigma_geo_at", "gt1l/land_ice_segments/ground_track/sigma_geo_r", "gt1l/land_ice_segments/ground_track/sigma_geo_xt", "gt1l/land_ice_segments/ground_track/x_atc", "gt1l/land_ice_segments/ground_track/y_atc", "gt1l/residual_histogram/bckgrd_per_m", "gt1l/residual_histogram/bin_top_h", "gt1l/residual_histogram/count", "gt1l/residual_histogram/delta_time", "gt1l/residual_histogram/ds_segment_id", "gt1l/residual_histogram/lat_mean", "gt1l/residual_histogram/lon_mean", "gt1l/residual_histogram/pulse_count", "gt1l/residual_histogram/segment_id_list", "gt1l/residual_histogram/x_atc_mean", 
"gt1l/segment_quality/delta_time", "gt1l/segment_quality/record_number", "gt1l/segment_quality/reference_pt_lat", "gt1l/segment_quality/reference_pt_lon", "gt1l/segment_quality/segment_id", "gt1l/segment_quality/signal_selection_source", "gt1l/segment_quality/signal_selection_status/signal_selection_status_all", "gt1l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt1r/land_ice_segments/atl06_quality_summary", "gt1r/land_ice_segments/delta_time", "gt1r/land_ice_segments/h_li", "gt1r/land_ice_segments/h_li_sigma", "gt1r/land_ice_segments/latitude", "gt1r/land_ice_segments/longitude", "gt1r/land_ice_segments/segment_id", "gt1r/land_ice_segments/sigma_geo_h", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_med_corr", "gt1r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_n_corr", "gt1r/land_ice_segments/bias_correction/med_r_fit", "gt1r/land_ice_segments/bias_correction/tx_mean_corr", "gt1r/land_ice_segments/bias_correction/tx_med_corr", "gt1r/land_ice_segments/dem/dem_flag", "gt1r/land_ice_segments/dem/dem_h", "gt1r/land_ice_segments/dem/geoid_free2mean", "gt1r/land_ice_segments/dem/geoid_h", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1r/land_ice_segments/fit_statistics/dh_fit_dy", "gt1r/land_ice_segments/fit_statistics/h_expected_rms", "gt1r/land_ice_segments/fit_statistics/h_mean", "gt1r/land_ice_segments/fit_statistics/h_rms_misfit", "gt1r/land_ice_segments/fit_statistics/h_robust_sprd", "gt1r/land_ice_segments/fit_statistics/n_fit_photons", "gt1r/land_ice_segments/fit_statistics/n_seg_pulses", "gt1r/land_ice_segments/fit_statistics/sigma_h_mean", "gt1r/land_ice_segments/fit_statistics/signal_selection_source", "gt1r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1r/land_ice_segments/fit_statistics/snr", "gt1r/land_ice_segments/fit_statistics/snr_significance", "gt1r/land_ice_segments/fit_statistics/w_surface_window_final", "gt1r/land_ice_segments/geophysical/bckgrd", "gt1r/land_ice_segments/geophysical/bsnow_conf", "gt1r/land_ice_segments/geophysical/bsnow_h", "gt1r/land_ice_segments/geophysical/bsnow_od", "gt1r/land_ice_segments/geophysical/cloud_flg_asr", "gt1r/land_ice_segments/geophysical/cloud_flg_atm", "gt1r/land_ice_segments/geophysical/dac", "gt1r/land_ice_segments/geophysical/e_bckgrd", "gt1r/land_ice_segments/geophysical/layer_flag", "gt1r/land_ice_segments/geophysical/msw_flag", "gt1r/land_ice_segments/geophysical/neutat_delay_total", "gt1r/land_ice_segments/geophysical/r_eff", "gt1r/land_ice_segments/geophysical/solar_azimuth", "gt1r/land_ice_segments/geophysical/solar_elevation", "gt1r/land_ice_segments/geophysical/tide_earth", "gt1r/land_ice_segments/geophysical/tide_earth_free2mean", "gt1r/land_ice_segments/geophysical/tide_equilibrium", "gt1r/land_ice_segments/geophysical/tide_load", "gt1r/land_ice_segments/geophysical/tide_ocean", "gt1r/land_ice_segments/geophysical/tide_pole", "gt1r/land_ice_segments/ground_track/ref_azimuth", "gt1r/land_ice_segments/ground_track/ref_coelv", "gt1r/land_ice_segments/ground_track/seg_azimuth", "gt1r/land_ice_segments/ground_track/sigma_geo_at", "gt1r/land_ice_segments/ground_track/sigma_geo_r", "gt1r/land_ice_segments/ground_track/sigma_geo_xt", 
"gt1r/land_ice_segments/ground_track/x_atc", "gt1r/land_ice_segments/ground_track/y_atc", "gt1r/residual_histogram/bckgrd_per_m", "gt1r/residual_histogram/bin_top_h", "gt1r/residual_histogram/count", "gt1r/residual_histogram/delta_time", "gt1r/residual_histogram/ds_segment_id", "gt1r/residual_histogram/lat_mean", "gt1r/residual_histogram/lon_mean", "gt1r/residual_histogram/pulse_count", "gt1r/residual_histogram/segment_id_list", "gt1r/residual_histogram/x_atc_mean", "gt1r/segment_quality/delta_time", "gt1r/segment_quality/record_number", "gt1r/segment_quality/reference_pt_lat", "gt1r/segment_quality/reference_pt_lon", "gt1r/segment_quality/segment_id", "gt1r/segment_quality/signal_selection_source", "gt1r/segment_quality/signal_selection_status/signal_selection_status_all", "gt1r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2l/land_ice_segments/atl06_quality_summary", "gt2l/land_ice_segments/delta_time", "gt2l/land_ice_segments/h_li", "gt2l/land_ice_segments/h_li_sigma", "gt2l/land_ice_segments/latitude", "gt2l/land_ice_segments/longitude", "gt2l/land_ice_segments/segment_id", "gt2l/land_ice_segments/sigma_geo_h", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_med_corr", "gt2l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_n_corr", "gt2l/land_ice_segments/bias_correction/med_r_fit", "gt2l/land_ice_segments/bias_correction/tx_mean_corr", "gt2l/land_ice_segments/bias_correction/tx_med_corr", "gt2l/land_ice_segments/dem/dem_flag", "gt2l/land_ice_segments/dem/dem_h", "gt2l/land_ice_segments/dem/geoid_free2mean", "gt2l/land_ice_segments/dem/geoid_h", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2l/land_ice_segments/fit_statistics/dh_fit_dy", "gt2l/land_ice_segments/fit_statistics/h_expected_rms", "gt2l/land_ice_segments/fit_statistics/h_mean", "gt2l/land_ice_segments/fit_statistics/h_rms_misfit", "gt2l/land_ice_segments/fit_statistics/h_robust_sprd", "gt2l/land_ice_segments/fit_statistics/n_fit_photons", "gt2l/land_ice_segments/fit_statistics/n_seg_pulses", "gt2l/land_ice_segments/fit_statistics/sigma_h_mean", "gt2l/land_ice_segments/fit_statistics/signal_selection_source", "gt2l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2l/land_ice_segments/fit_statistics/snr", "gt2l/land_ice_segments/fit_statistics/snr_significance", "gt2l/land_ice_segments/fit_statistics/w_surface_window_final", "gt2l/land_ice_segments/geophysical/bckgrd", "gt2l/land_ice_segments/geophysical/bsnow_conf", "gt2l/land_ice_segments/geophysical/bsnow_h", "gt2l/land_ice_segments/geophysical/bsnow_od", "gt2l/land_ice_segments/geophysical/cloud_flg_asr", "gt2l/land_ice_segments/geophysical/cloud_flg_atm", "gt2l/land_ice_segments/geophysical/dac", "gt2l/land_ice_segments/geophysical/e_bckgrd", "gt2l/land_ice_segments/geophysical/layer_flag", "gt2l/land_ice_segments/geophysical/msw_flag", "gt2l/land_ice_segments/geophysical/neutat_delay_total", "gt2l/land_ice_segments/geophysical/r_eff", "gt2l/land_ice_segments/geophysical/solar_azimuth", "gt2l/land_ice_segments/geophysical/solar_elevation", "gt2l/land_ice_segments/geophysical/tide_earth", "gt2l/land_ice_segments/geophysical/tide_earth_free2mean", "gt2l/land_ice_segments/geophysical/tide_equilibrium", 
"gt2l/land_ice_segments/geophysical/tide_load", "gt2l/land_ice_segments/geophysical/tide_ocean", "gt2l/land_ice_segments/geophysical/tide_pole", "gt2l/land_ice_segments/ground_track/ref_azimuth", "gt2l/land_ice_segments/ground_track/ref_coelv", "gt2l/land_ice_segments/ground_track/seg_azimuth", "gt2l/land_ice_segments/ground_track/sigma_geo_at", "gt2l/land_ice_segments/ground_track/sigma_geo_r", "gt2l/land_ice_segments/ground_track/sigma_geo_xt", "gt2l/land_ice_segments/ground_track/x_atc", "gt2l/land_ice_segments/ground_track/y_atc", "gt2l/residual_histogram/bckgrd_per_m", "gt2l/residual_histogram/bin_top_h", "gt2l/residual_histogram/count", "gt2l/residual_histogram/delta_time", "gt2l/residual_histogram/ds_segment_id", "gt2l/residual_histogram/lat_mean", "gt2l/residual_histogram/lon_mean", "gt2l/residual_histogram/pulse_count", "gt2l/residual_histogram/segment_id_list", "gt2l/residual_histogram/x_atc_mean", "gt2l/segment_quality/delta_time", "gt2l/segment_quality/record_number", "gt2l/segment_quality/reference_pt_lat", "gt2l/segment_quality/reference_pt_lon", "gt2l/segment_quality/segment_id", "gt2l/segment_quality/signal_selection_source", "gt2l/segment_quality/signal_selection_status/signal_selection_status_all", "gt2l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2r/land_ice_segments/atl06_quality_summary", "gt2r/land_ice_segments/delta_time", "gt2r/land_ice_segments/h_li", "gt2r/land_ice_segments/h_li_sigma", "gt2r/land_ice_segments/latitude", "gt2r/land_ice_segments/longitude", "gt2r/land_ice_segments/segment_id", "gt2r/land_ice_segments/sigma_geo_h", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_med_corr", "gt2r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_n_corr", "gt2r/land_ice_segments/bias_correction/med_r_fit", "gt2r/land_ice_segments/bias_correction/tx_mean_corr", "gt2r/land_ice_segments/bias_correction/tx_med_corr", "gt2r/land_ice_segments/dem/dem_flag", "gt2r/land_ice_segments/dem/dem_h", "gt2r/land_ice_segments/dem/geoid_free2mean", "gt2r/land_ice_segments/dem/geoid_h", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2r/land_ice_segments/fit_statistics/dh_fit_dy", "gt2r/land_ice_segments/fit_statistics/h_expected_rms", "gt2r/land_ice_segments/fit_statistics/h_mean", "gt2r/land_ice_segments/fit_statistics/h_rms_misfit", "gt2r/land_ice_segments/fit_statistics/h_robust_sprd", "gt2r/land_ice_segments/fit_statistics/n_fit_photons", "gt2r/land_ice_segments/fit_statistics/n_seg_pulses", "gt2r/land_ice_segments/fit_statistics/sigma_h_mean", "gt2r/land_ice_segments/fit_statistics/signal_selection_source", "gt2r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2r/land_ice_segments/fit_statistics/snr", "gt2r/land_ice_segments/fit_statistics/snr_significance", "gt2r/land_ice_segments/fit_statistics/w_surface_window_final", "gt2r/land_ice_segments/geophysical/bckgrd", "gt2r/land_ice_segments/geophysical/bsnow_conf", "gt2r/land_ice_segments/geophysical/bsnow_h", "gt2r/land_ice_segments/geophysical/bsnow_od", "gt2r/land_ice_segments/geophysical/cloud_flg_asr", "gt2r/land_ice_segments/geophysical/cloud_flg_atm", "gt2r/land_ice_segments/geophysical/dac", "gt2r/land_ice_segments/geophysical/e_bckgrd", 
"gt2r/land_ice_segments/geophysical/layer_flag", "gt2r/land_ice_segments/geophysical/msw_flag", "gt2r/land_ice_segments/geophysical/neutat_delay_total", "gt2r/land_ice_segments/geophysical/r_eff", "gt2r/land_ice_segments/geophysical/solar_azimuth", "gt2r/land_ice_segments/geophysical/solar_elevation", "gt2r/land_ice_segments/geophysical/tide_earth", "gt2r/land_ice_segments/geophysical/tide_earth_free2mean", "gt2r/land_ice_segments/geophysical/tide_equilibrium", "gt2r/land_ice_segments/geophysical/tide_load", "gt2r/land_ice_segments/geophysical/tide_ocean", "gt2r/land_ice_segments/geophysical/tide_pole", "gt2r/land_ice_segments/ground_track/ref_azimuth", "gt2r/land_ice_segments/ground_track/ref_coelv", "gt2r/land_ice_segments/ground_track/seg_azimuth", "gt2r/land_ice_segments/ground_track/sigma_geo_at", "gt2r/land_ice_segments/ground_track/sigma_geo_r", "gt2r/land_ice_segments/ground_track/sigma_geo_xt", "gt2r/land_ice_segments/ground_track/x_atc", "gt2r/land_ice_segments/ground_track/y_atc", "gt2r/residual_histogram/bckgrd_per_m", "gt2r/residual_histogram/bin_top_h", "gt2r/residual_histogram/count", "gt2r/residual_histogram/delta_time", "gt2r/residual_histogram/ds_segment_id", "gt2r/residual_histogram/lat_mean", "gt2r/residual_histogram/lon_mean", "gt2r/residual_histogram/pulse_count", "gt2r/residual_histogram/segment_id_list", "gt2r/residual_histogram/x_atc_mean", "gt2r/segment_quality/delta_time", "gt2r/segment_quality/record_number", "gt2r/segment_quality/reference_pt_lat", "gt2r/segment_quality/reference_pt_lon", "gt2r/segment_quality/segment_id", "gt2r/segment_quality/signal_selection_source", "gt2r/segment_quality/signal_selection_status/signal_selection_status_all", "gt2r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3l/land_ice_segments/atl06_quality_summary", "gt3l/land_ice_segments/delta_time", "gt3l/land_ice_segments/h_li", "gt3l/land_ice_segments/h_li_sigma", "gt3l/land_ice_segments/latitude", "gt3l/land_ice_segments/longitude", "gt3l/land_ice_segments/segment_id", "gt3l/land_ice_segments/sigma_geo_h", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_med_corr", "gt3l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_n_corr", "gt3l/land_ice_segments/bias_correction/med_r_fit", "gt3l/land_ice_segments/bias_correction/tx_mean_corr", "gt3l/land_ice_segments/bias_correction/tx_med_corr", "gt3l/land_ice_segments/dem/dem_flag", "gt3l/land_ice_segments/dem/dem_h", "gt3l/land_ice_segments/dem/geoid_free2mean", "gt3l/land_ice_segments/dem/geoid_h", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3l/land_ice_segments/fit_statistics/dh_fit_dy", "gt3l/land_ice_segments/fit_statistics/h_expected_rms", "gt3l/land_ice_segments/fit_statistics/h_mean", "gt3l/land_ice_segments/fit_statistics/h_rms_misfit", "gt3l/land_ice_segments/fit_statistics/h_robust_sprd", "gt3l/land_ice_segments/fit_statistics/n_fit_photons", "gt3l/land_ice_segments/fit_statistics/n_seg_pulses", "gt3l/land_ice_segments/fit_statistics/sigma_h_mean", "gt3l/land_ice_segments/fit_statistics/signal_selection_source", "gt3l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3l/land_ice_segments/fit_statistics/snr", "gt3l/land_ice_segments/fit_statistics/snr_significance", 
"gt3l/land_ice_segments/fit_statistics/w_surface_window_final", "gt3l/land_ice_segments/geophysical/bckgrd", "gt3l/land_ice_segments/geophysical/bsnow_conf", "gt3l/land_ice_segments/geophysical/bsnow_h", "gt3l/land_ice_segments/geophysical/bsnow_od", "gt3l/land_ice_segments/geophysical/cloud_flg_asr", "gt3l/land_ice_segments/geophysical/cloud_flg_atm", "gt3l/land_ice_segments/geophysical/dac", "gt3l/land_ice_segments/geophysical/e_bckgrd", "gt3l/land_ice_segments/geophysical/layer_flag", "gt3l/land_ice_segments/geophysical/msw_flag", "gt3l/land_ice_segments/geophysical/neutat_delay_total", "gt3l/land_ice_segments/geophysical/r_eff", "gt3l/land_ice_segments/geophysical/solar_azimuth", "gt3l/land_ice_segments/geophysical/solar_elevation", "gt3l/land_ice_segments/geophysical/tide_earth", "gt3l/land_ice_segments/geophysical/tide_earth_free2mean", "gt3l/land_ice_segments/geophysical/tide_equilibrium", "gt3l/land_ice_segments/geophysical/tide_load", "gt3l/land_ice_segments/geophysical/tide_ocean", "gt3l/land_ice_segments/geophysical/tide_pole", "gt3l/land_ice_segments/ground_track/ref_azimuth", "gt3l/land_ice_segments/ground_track/ref_coelv", "gt3l/land_ice_segments/ground_track/seg_azimuth", "gt3l/land_ice_segments/ground_track/sigma_geo_at", "gt3l/land_ice_segments/ground_track/sigma_geo_r", "gt3l/land_ice_segments/ground_track/sigma_geo_xt", "gt3l/land_ice_segments/ground_track/x_atc", "gt3l/land_ice_segments/ground_track/y_atc", "gt3l/residual_histogram/bckgrd_per_m", "gt3l/residual_histogram/bin_top_h", "gt3l/residual_histogram/count", "gt3l/residual_histogram/delta_time", "gt3l/residual_histogram/ds_segment_id", "gt3l/residual_histogram/lat_mean", "gt3l/residual_histogram/lon_mean", "gt3l/residual_histogram/pulse_count", "gt3l/residual_histogram/segment_id_list", "gt3l/residual_histogram/x_atc_mean", "gt3l/segment_quality/delta_time", "gt3l/segment_quality/record_number", "gt3l/segment_quality/reference_pt_lat", "gt3l/segment_quality/reference_pt_lon", "gt3l/segment_quality/segment_id", "gt3l/segment_quality/signal_selection_source", "gt3l/segment_quality/signal_selection_status/signal_selection_status_all", "gt3l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3r/land_ice_segments/atl06_quality_summary", "gt3r/land_ice_segments/delta_time", "gt3r/land_ice_segments/h_li", "gt3r/land_ice_segments/h_li_sigma", "gt3r/land_ice_segments/latitude", "gt3r/land_ice_segments/longitude", "gt3r/land_ice_segments/segment_id", "gt3r/land_ice_segments/sigma_geo_h", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_med_corr", "gt3r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_n_corr", "gt3r/land_ice_segments/bias_correction/med_r_fit", "gt3r/land_ice_segments/bias_correction/tx_mean_corr", "gt3r/land_ice_segments/bias_correction/tx_med_corr", "gt3r/land_ice_segments/dem/dem_flag", "gt3r/land_ice_segments/dem/dem_h", "gt3r/land_ice_segments/dem/geoid_free2mean", "gt3r/land_ice_segments/dem/geoid_h", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3r/land_ice_segments/fit_statistics/dh_fit_dy", "gt3r/land_ice_segments/fit_statistics/h_expected_rms", "gt3r/land_ice_segments/fit_statistics/h_mean", "gt3r/land_ice_segments/fit_statistics/h_rms_misfit", 
"gt3r/land_ice_segments/fit_statistics/h_robust_sprd", "gt3r/land_ice_segments/fit_statistics/n_fit_photons", "gt3r/land_ice_segments/fit_statistics/n_seg_pulses", "gt3r/land_ice_segments/fit_statistics/sigma_h_mean", "gt3r/land_ice_segments/fit_statistics/signal_selection_source", "gt3r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3r/land_ice_segments/fit_statistics/snr", "gt3r/land_ice_segments/fit_statistics/snr_significance", "gt3r/land_ice_segments/fit_statistics/w_surface_window_final", "gt3r/land_ice_segments/geophysical/bckgrd", "gt3r/land_ice_segments/geophysical/bsnow_conf", "gt3r/land_ice_segments/geophysical/bsnow_h", "gt3r/land_ice_segments/geophysical/bsnow_od", "gt3r/land_ice_segments/geophysical/cloud_flg_asr", "gt3r/land_ice_segments/geophysical/cloud_flg_atm", "gt3r/land_ice_segments/geophysical/dac", "gt3r/land_ice_segments/geophysical/e_bckgrd", "gt3r/land_ice_segments/geophysical/layer_flag", "gt3r/land_ice_segments/geophysical/msw_flag", "gt3r/land_ice_segments/geophysical/neutat_delay_total", "gt3r/land_ice_segments/geophysical/r_eff", "gt3r/land_ice_segments/geophysical/solar_azimuth", "gt3r/land_ice_segments/geophysical/solar_elevation", "gt3r/land_ice_segments/geophysical/tide_earth", "gt3r/land_ice_segments/geophysical/tide_earth_free2mean", "gt3r/land_ice_segments/geophysical/tide_equilibrium", "gt3r/land_ice_segments/geophysical/tide_load", "gt3r/land_ice_segments/geophysical/tide_ocean", "gt3r/land_ice_segments/geophysical/tide_pole", "gt3r/land_ice_segments/ground_track/ref_azimuth", "gt3r/land_ice_segments/ground_track/ref_coelv", "gt3r/land_ice_segments/ground_track/seg_azimuth", "gt3r/land_ice_segments/ground_track/sigma_geo_at", "gt3r/land_ice_segments/ground_track/sigma_geo_r", "gt3r/land_ice_segments/ground_track/sigma_geo_xt", "gt3r/land_ice_segments/ground_track/x_atc", "gt3r/land_ice_segments/ground_track/y_atc", "gt3r/residual_histogram/bckgrd_per_m", "gt3r/residual_histogram/bin_top_h", "gt3r/residual_histogram/count", "gt3r/residual_histogram/delta_time", "gt3r/residual_histogram/ds_segment_id", "gt3r/residual_histogram/lat_mean", "gt3r/residual_histogram/lon_mean", "gt3r/residual_histogram/pulse_count", "gt3r/residual_histogram/segment_id_list", "gt3r/residual_histogram/x_atc_mean", "gt3r/segment_quality/delta_time", "gt3r/segment_quality/record_number", "gt3r/segment_quality/reference_pt_lat", "gt3r/segment_quality/reference_pt_lon", "gt3r/segment_quality/segment_id", "gt3r/segment_quality/signal_selection_source", "gt3r/segment_quality/signal_selection_status/signal_selection_status_all", "gt3r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3r/segment_quality/signal_selection_status/signal_selection_status_confident", "orbit_info/bounding_polygon_lat1", "orbit_info/bounding_polygon_lon1", "orbit_info/crossing_time", "orbit_info/cycle_number", "orbit_info/lan", "orbit_info/orbit_number", "orbit_info/rgt", "orbit_info/sc_orient", "orbit_info/sc_orient_time", "quality_assessment/qa_granule_fail_reason", "quality_assessment/qa_granule_pass_fail", "quality_assessment/gt1l/delta_time", "quality_assessment/gt1l/lat_mean", "quality_assessment/gt1l/lon_mean", "quality_assessment/gt1l/signal_selection_source_fraction_0", "quality_assessment/gt1l/signal_selection_source_fraction_1", "quality_assessment/gt1l/signal_selection_source_fraction_2", "quality_assessment/gt1l/signal_selection_source_fraction_3", "quality_assessment/gt1r/delta_time", "quality_assessment/gt1r/lat_mean", 
"quality_assessment/gt1r/lon_mean", "quality_assessment/gt1r/signal_selection_source_fraction_0", "quality_assessment/gt1r/signal_selection_source_fraction_1", "quality_assessment/gt1r/signal_selection_source_fraction_2", "quality_assessment/gt1r/signal_selection_source_fraction_3", "quality_assessment/gt2l/delta_time", "quality_assessment/gt2l/lat_mean", "quality_assessment/gt2l/lon_mean", "quality_assessment/gt2l/signal_selection_source_fraction_0", "quality_assessment/gt2l/signal_selection_source_fraction_1", "quality_assessment/gt2l/signal_selection_source_fraction_2", "quality_assessment/gt2l/signal_selection_source_fraction_3", "quality_assessment/gt2r/delta_time", "quality_assessment/gt2r/lat_mean", "quality_assessment/gt2r/lon_mean", "quality_assessment/gt2r/signal_selection_source_fraction_0", "quality_assessment/gt2r/signal_selection_source_fraction_1", "quality_assessment/gt2r/signal_selection_source_fraction_2", "quality_assessment/gt2r/signal_selection_source_fraction_3", "quality_assessment/gt3l/delta_time", "quality_assessment/gt3l/lat_mean", "quality_assessment/gt3l/lon_mean", "quality_assessment/gt3l/signal_selection_source_fraction_0", "quality_assessment/gt3l/signal_selection_source_fraction_1", "quality_assessment/gt3l/signal_selection_source_fraction_2", "quality_assessment/gt3l/signal_selection_source_fraction_3", "quality_assessment/gt3r/delta_time", "quality_assessment/gt3r/lat_mean", "quality_assessment/gt3r/lon_mean", "quality_assessment/gt3r/signal_selection_source_fraction_0", "quality_assessment/gt3r/signal_selection_source_fraction_1", "quality_assessment/gt3r/signal_selection_source_fraction_2", "quality_assessment/gt3r/signal_selection_source_fraction_3"]} \ No newline at end of file +{"options": [{"id": "ICESAT2", "spatialSubsetting": "true", "spatialSubsettingShapefile": "true", "temporalSubsetting": "true", "type": "both", "maxGransSyncRequest": "100", "maxGransAsyncRequest": "2000"}], "fileformats": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile"], "reprojectionONLY": [], "noproj": [], "formatreproj": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile"], "variables": ["ancillary_data/atlas_sdp_gps_epoch", "ancillary_data/control", "ancillary_data/data_end_utc", "ancillary_data/data_start_utc", "ancillary_data/end_cycle", "ancillary_data/end_delta_time", "ancillary_data/end_geoseg", "ancillary_data/end_gpssow", "ancillary_data/end_gpsweek", "ancillary_data/end_orbit", "ancillary_data/end_region", "ancillary_data/end_rgt", "ancillary_data/granule_end_utc", "ancillary_data/granule_start_utc", "ancillary_data/qa_at_interval", "ancillary_data/release", "ancillary_data/start_cycle", "ancillary_data/start_delta_time", "ancillary_data/start_geoseg", "ancillary_data/start_gpssow", "ancillary_data/start_gpsweek", "ancillary_data/start_orbit", "ancillary_data/start_region", "ancillary_data/start_rgt", "ancillary_data/version", "ancillary_data/land_ice/dt_hist", "ancillary_data/land_ice/fit_maxiter", "ancillary_data/land_ice/fpb_maxiter", "ancillary_data/land_ice/max_res_ids", "ancillary_data/land_ice/min_dist", "ancillary_data/land_ice/min_gain_th", "ancillary_data/land_ice/min_n_pe", "ancillary_data/land_ice/min_n_sel", "ancillary_data/land_ice/min_signal_conf", "ancillary_data/land_ice/n_hist", "ancillary_data/land_ice/nhist_bins", "ancillary_data/land_ice/n_sigmas", "ancillary_data/land_ice/proc_interval", "ancillary_data/land_ice/qs_lim_bsc", "ancillary_data/land_ice/qs_lim_hrs", "ancillary_data/land_ice/qs_lim_hsigma", "ancillary_data/land_ice/qs_lim_msw", 
"ancillary_data/land_ice/qs_lim_snr", "ancillary_data/land_ice/qs_lim_sss", "ancillary_data/land_ice/rbin_width", "ancillary_data/land_ice/sigma_beam", "ancillary_data/land_ice/sigma_tx", "ancillary_data/land_ice/t_dead", "ancillary_data/land_ice/txp_maxiter", "gt1l/land_ice_segments/atl06_quality_summary", "gt1l/land_ice_segments/delta_time", "gt1l/land_ice_segments/h_li", "gt1l/land_ice_segments/h_li_sigma", "gt1l/land_ice_segments/latitude", "gt1l/land_ice_segments/longitude", "gt1l/land_ice_segments/segment_id", "gt1l/land_ice_segments/sigma_geo_h", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_med_corr", "gt1l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_n_corr", "gt1l/land_ice_segments/bias_correction/med_r_fit", "gt1l/land_ice_segments/bias_correction/tx_mean_corr", "gt1l/land_ice_segments/bias_correction/tx_med_corr", "gt1l/land_ice_segments/dem/dem_flag", "gt1l/land_ice_segments/dem/dem_h", "gt1l/land_ice_segments/dem/geoid_free2mean", "gt1l/land_ice_segments/dem/geoid_h", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1l/land_ice_segments/fit_statistics/dh_fit_dy", "gt1l/land_ice_segments/fit_statistics/h_expected_rms", "gt1l/land_ice_segments/fit_statistics/h_mean", "gt1l/land_ice_segments/fit_statistics/h_rms_misfit", "gt1l/land_ice_segments/fit_statistics/h_robust_sprd", "gt1l/land_ice_segments/fit_statistics/n_fit_photons", "gt1l/land_ice_segments/fit_statistics/n_seg_pulses", "gt1l/land_ice_segments/fit_statistics/sigma_h_mean", "gt1l/land_ice_segments/fit_statistics/signal_selection_source", "gt1l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1l/land_ice_segments/fit_statistics/snr", "gt1l/land_ice_segments/fit_statistics/snr_significance", "gt1l/land_ice_segments/fit_statistics/w_surface_window_final", "gt1l/land_ice_segments/geophysical/bckgrd", "gt1l/land_ice_segments/geophysical/bsnow_conf", "gt1l/land_ice_segments/geophysical/bsnow_h", "gt1l/land_ice_segments/geophysical/bsnow_od", "gt1l/land_ice_segments/geophysical/cloud_flg_asr", "gt1l/land_ice_segments/geophysical/cloud_flg_atm", "gt1l/land_ice_segments/geophysical/dac", "gt1l/land_ice_segments/geophysical/e_bckgrd", "gt1l/land_ice_segments/geophysical/layer_flag", "gt1l/land_ice_segments/geophysical/msw_flag", "gt1l/land_ice_segments/geophysical/neutat_delay_total", "gt1l/land_ice_segments/geophysical/r_eff", "gt1l/land_ice_segments/geophysical/solar_azimuth", "gt1l/land_ice_segments/geophysical/solar_elevation", "gt1l/land_ice_segments/geophysical/tide_earth", "gt1l/land_ice_segments/geophysical/tide_earth_free2mean", "gt1l/land_ice_segments/geophysical/tide_equilibrium", "gt1l/land_ice_segments/geophysical/tide_load", "gt1l/land_ice_segments/geophysical/tide_ocean", "gt1l/land_ice_segments/geophysical/tide_pole", "gt1l/land_ice_segments/ground_track/ref_azimuth", "gt1l/land_ice_segments/ground_track/ref_coelv", "gt1l/land_ice_segments/ground_track/seg_azimuth", "gt1l/land_ice_segments/ground_track/sigma_geo_at", "gt1l/land_ice_segments/ground_track/sigma_geo_r", "gt1l/land_ice_segments/ground_track/sigma_geo_xt", "gt1l/land_ice_segments/ground_track/x_atc", "gt1l/land_ice_segments/ground_track/y_atc", "gt1l/residual_histogram/bckgrd_per_m", "gt1l/residual_histogram/bin_top_h", "gt1l/residual_histogram/count", "gt1l/residual_histogram/delta_time", 
"gt1l/residual_histogram/ds_segment_id", "gt1l/residual_histogram/lat_mean", "gt1l/residual_histogram/lon_mean", "gt1l/residual_histogram/pulse_count", "gt1l/residual_histogram/segment_id_list", "gt1l/residual_histogram/x_atc_mean", "gt1l/segment_quality/delta_time", "gt1l/segment_quality/record_number", "gt1l/segment_quality/reference_pt_lat", "gt1l/segment_quality/reference_pt_lon", "gt1l/segment_quality/segment_id", "gt1l/segment_quality/signal_selection_source", "gt1l/segment_quality/signal_selection_status/signal_selection_status_all", "gt1l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt1r/land_ice_segments/atl06_quality_summary", "gt1r/land_ice_segments/delta_time", "gt1r/land_ice_segments/h_li", "gt1r/land_ice_segments/h_li_sigma", "gt1r/land_ice_segments/latitude", "gt1r/land_ice_segments/longitude", "gt1r/land_ice_segments/segment_id", "gt1r/land_ice_segments/sigma_geo_h", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_med_corr", "gt1r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_n_corr", "gt1r/land_ice_segments/bias_correction/med_r_fit", "gt1r/land_ice_segments/bias_correction/tx_mean_corr", "gt1r/land_ice_segments/bias_correction/tx_med_corr", "gt1r/land_ice_segments/dem/dem_flag", "gt1r/land_ice_segments/dem/dem_h", "gt1r/land_ice_segments/dem/geoid_free2mean", "gt1r/land_ice_segments/dem/geoid_h", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1r/land_ice_segments/fit_statistics/dh_fit_dy", "gt1r/land_ice_segments/fit_statistics/h_expected_rms", "gt1r/land_ice_segments/fit_statistics/h_mean", "gt1r/land_ice_segments/fit_statistics/h_rms_misfit", "gt1r/land_ice_segments/fit_statistics/h_robust_sprd", "gt1r/land_ice_segments/fit_statistics/n_fit_photons", "gt1r/land_ice_segments/fit_statistics/n_seg_pulses", "gt1r/land_ice_segments/fit_statistics/sigma_h_mean", "gt1r/land_ice_segments/fit_statistics/signal_selection_source", "gt1r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1r/land_ice_segments/fit_statistics/snr", "gt1r/land_ice_segments/fit_statistics/snr_significance", "gt1r/land_ice_segments/fit_statistics/w_surface_window_final", "gt1r/land_ice_segments/geophysical/bckgrd", "gt1r/land_ice_segments/geophysical/bsnow_conf", "gt1r/land_ice_segments/geophysical/bsnow_h", "gt1r/land_ice_segments/geophysical/bsnow_od", "gt1r/land_ice_segments/geophysical/cloud_flg_asr", "gt1r/land_ice_segments/geophysical/cloud_flg_atm", "gt1r/land_ice_segments/geophysical/dac", "gt1r/land_ice_segments/geophysical/e_bckgrd", "gt1r/land_ice_segments/geophysical/layer_flag", "gt1r/land_ice_segments/geophysical/msw_flag", "gt1r/land_ice_segments/geophysical/neutat_delay_total", "gt1r/land_ice_segments/geophysical/r_eff", "gt1r/land_ice_segments/geophysical/solar_azimuth", "gt1r/land_ice_segments/geophysical/solar_elevation", "gt1r/land_ice_segments/geophysical/tide_earth", "gt1r/land_ice_segments/geophysical/tide_earth_free2mean", "gt1r/land_ice_segments/geophysical/tide_equilibrium", "gt1r/land_ice_segments/geophysical/tide_load", "gt1r/land_ice_segments/geophysical/tide_ocean", "gt1r/land_ice_segments/geophysical/tide_pole", "gt1r/land_ice_segments/ground_track/ref_azimuth", "gt1r/land_ice_segments/ground_track/ref_coelv", 
"gt1r/land_ice_segments/ground_track/seg_azimuth", "gt1r/land_ice_segments/ground_track/sigma_geo_at", "gt1r/land_ice_segments/ground_track/sigma_geo_r", "gt1r/land_ice_segments/ground_track/sigma_geo_xt", "gt1r/land_ice_segments/ground_track/x_atc", "gt1r/land_ice_segments/ground_track/y_atc", "gt1r/residual_histogram/bckgrd_per_m", "gt1r/residual_histogram/bin_top_h", "gt1r/residual_histogram/count", "gt1r/residual_histogram/delta_time", "gt1r/residual_histogram/ds_segment_id", "gt1r/residual_histogram/lat_mean", "gt1r/residual_histogram/lon_mean", "gt1r/residual_histogram/pulse_count", "gt1r/residual_histogram/segment_id_list", "gt1r/residual_histogram/x_atc_mean", "gt1r/segment_quality/delta_time", "gt1r/segment_quality/record_number", "gt1r/segment_quality/reference_pt_lat", "gt1r/segment_quality/reference_pt_lon", "gt1r/segment_quality/segment_id", "gt1r/segment_quality/signal_selection_source", "gt1r/segment_quality/signal_selection_status/signal_selection_status_all", "gt1r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2l/land_ice_segments/atl06_quality_summary", "gt2l/land_ice_segments/delta_time", "gt2l/land_ice_segments/h_li", "gt2l/land_ice_segments/h_li_sigma", "gt2l/land_ice_segments/latitude", "gt2l/land_ice_segments/longitude", "gt2l/land_ice_segments/segment_id", "gt2l/land_ice_segments/sigma_geo_h", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_med_corr", "gt2l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_n_corr", "gt2l/land_ice_segments/bias_correction/med_r_fit", "gt2l/land_ice_segments/bias_correction/tx_mean_corr", "gt2l/land_ice_segments/bias_correction/tx_med_corr", "gt2l/land_ice_segments/dem/dem_flag", "gt2l/land_ice_segments/dem/dem_h", "gt2l/land_ice_segments/dem/geoid_free2mean", "gt2l/land_ice_segments/dem/geoid_h", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2l/land_ice_segments/fit_statistics/dh_fit_dy", "gt2l/land_ice_segments/fit_statistics/h_expected_rms", "gt2l/land_ice_segments/fit_statistics/h_mean", "gt2l/land_ice_segments/fit_statistics/h_rms_misfit", "gt2l/land_ice_segments/fit_statistics/h_robust_sprd", "gt2l/land_ice_segments/fit_statistics/n_fit_photons", "gt2l/land_ice_segments/fit_statistics/n_seg_pulses", "gt2l/land_ice_segments/fit_statistics/sigma_h_mean", "gt2l/land_ice_segments/fit_statistics/signal_selection_source", "gt2l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2l/land_ice_segments/fit_statistics/snr", "gt2l/land_ice_segments/fit_statistics/snr_significance", "gt2l/land_ice_segments/fit_statistics/w_surface_window_final", "gt2l/land_ice_segments/geophysical/bckgrd", "gt2l/land_ice_segments/geophysical/bsnow_conf", "gt2l/land_ice_segments/geophysical/bsnow_h", "gt2l/land_ice_segments/geophysical/bsnow_od", "gt2l/land_ice_segments/geophysical/cloud_flg_asr", "gt2l/land_ice_segments/geophysical/cloud_flg_atm", "gt2l/land_ice_segments/geophysical/dac", "gt2l/land_ice_segments/geophysical/e_bckgrd", "gt2l/land_ice_segments/geophysical/layer_flag", "gt2l/land_ice_segments/geophysical/msw_flag", "gt2l/land_ice_segments/geophysical/neutat_delay_total", "gt2l/land_ice_segments/geophysical/r_eff", "gt2l/land_ice_segments/geophysical/solar_azimuth", 
"gt2l/land_ice_segments/geophysical/solar_elevation", "gt2l/land_ice_segments/geophysical/tide_earth", "gt2l/land_ice_segments/geophysical/tide_earth_free2mean", "gt2l/land_ice_segments/geophysical/tide_equilibrium", "gt2l/land_ice_segments/geophysical/tide_load", "gt2l/land_ice_segments/geophysical/tide_ocean", "gt2l/land_ice_segments/geophysical/tide_pole", "gt2l/land_ice_segments/ground_track/ref_azimuth", "gt2l/land_ice_segments/ground_track/ref_coelv", "gt2l/land_ice_segments/ground_track/seg_azimuth", "gt2l/land_ice_segments/ground_track/sigma_geo_at", "gt2l/land_ice_segments/ground_track/sigma_geo_r", "gt2l/land_ice_segments/ground_track/sigma_geo_xt", "gt2l/land_ice_segments/ground_track/x_atc", "gt2l/land_ice_segments/ground_track/y_atc", "gt2l/residual_histogram/bckgrd_per_m", "gt2l/residual_histogram/bin_top_h", "gt2l/residual_histogram/count", "gt2l/residual_histogram/delta_time", "gt2l/residual_histogram/ds_segment_id", "gt2l/residual_histogram/lat_mean", "gt2l/residual_histogram/lon_mean", "gt2l/residual_histogram/pulse_count", "gt2l/residual_histogram/segment_id_list", "gt2l/residual_histogram/x_atc_mean", "gt2l/segment_quality/delta_time", "gt2l/segment_quality/record_number", "gt2l/segment_quality/reference_pt_lat", "gt2l/segment_quality/reference_pt_lon", "gt2l/segment_quality/segment_id", "gt2l/segment_quality/signal_selection_source", "gt2l/segment_quality/signal_selection_status/signal_selection_status_all", "gt2l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2r/land_ice_segments/atl06_quality_summary", "gt2r/land_ice_segments/delta_time", "gt2r/land_ice_segments/h_li", "gt2r/land_ice_segments/h_li_sigma", "gt2r/land_ice_segments/latitude", "gt2r/land_ice_segments/longitude", "gt2r/land_ice_segments/segment_id", "gt2r/land_ice_segments/sigma_geo_h", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_med_corr", "gt2r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_n_corr", "gt2r/land_ice_segments/bias_correction/med_r_fit", "gt2r/land_ice_segments/bias_correction/tx_mean_corr", "gt2r/land_ice_segments/bias_correction/tx_med_corr", "gt2r/land_ice_segments/dem/dem_flag", "gt2r/land_ice_segments/dem/dem_h", "gt2r/land_ice_segments/dem/geoid_free2mean", "gt2r/land_ice_segments/dem/geoid_h", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2r/land_ice_segments/fit_statistics/dh_fit_dy", "gt2r/land_ice_segments/fit_statistics/h_expected_rms", "gt2r/land_ice_segments/fit_statistics/h_mean", "gt2r/land_ice_segments/fit_statistics/h_rms_misfit", "gt2r/land_ice_segments/fit_statistics/h_robust_sprd", "gt2r/land_ice_segments/fit_statistics/n_fit_photons", "gt2r/land_ice_segments/fit_statistics/n_seg_pulses", "gt2r/land_ice_segments/fit_statistics/sigma_h_mean", "gt2r/land_ice_segments/fit_statistics/signal_selection_source", "gt2r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2r/land_ice_segments/fit_statistics/snr", "gt2r/land_ice_segments/fit_statistics/snr_significance", "gt2r/land_ice_segments/fit_statistics/w_surface_window_final", "gt2r/land_ice_segments/geophysical/bckgrd", "gt2r/land_ice_segments/geophysical/bsnow_conf", "gt2r/land_ice_segments/geophysical/bsnow_h", "gt2r/land_ice_segments/geophysical/bsnow_od", 
"gt2r/land_ice_segments/geophysical/cloud_flg_asr", "gt2r/land_ice_segments/geophysical/cloud_flg_atm", "gt2r/land_ice_segments/geophysical/dac", "gt2r/land_ice_segments/geophysical/e_bckgrd", "gt2r/land_ice_segments/geophysical/layer_flag", "gt2r/land_ice_segments/geophysical/msw_flag", "gt2r/land_ice_segments/geophysical/neutat_delay_total", "gt2r/land_ice_segments/geophysical/r_eff", "gt2r/land_ice_segments/geophysical/solar_azimuth", "gt2r/land_ice_segments/geophysical/solar_elevation", "gt2r/land_ice_segments/geophysical/tide_earth", "gt2r/land_ice_segments/geophysical/tide_earth_free2mean", "gt2r/land_ice_segments/geophysical/tide_equilibrium", "gt2r/land_ice_segments/geophysical/tide_load", "gt2r/land_ice_segments/geophysical/tide_ocean", "gt2r/land_ice_segments/geophysical/tide_pole", "gt2r/land_ice_segments/ground_track/ref_azimuth", "gt2r/land_ice_segments/ground_track/ref_coelv", "gt2r/land_ice_segments/ground_track/seg_azimuth", "gt2r/land_ice_segments/ground_track/sigma_geo_at", "gt2r/land_ice_segments/ground_track/sigma_geo_r", "gt2r/land_ice_segments/ground_track/sigma_geo_xt", "gt2r/land_ice_segments/ground_track/x_atc", "gt2r/land_ice_segments/ground_track/y_atc", "gt2r/residual_histogram/bckgrd_per_m", "gt2r/residual_histogram/bin_top_h", "gt2r/residual_histogram/count", "gt2r/residual_histogram/delta_time", "gt2r/residual_histogram/ds_segment_id", "gt2r/residual_histogram/lat_mean", "gt2r/residual_histogram/lon_mean", "gt2r/residual_histogram/pulse_count", "gt2r/residual_histogram/segment_id_list", "gt2r/residual_histogram/x_atc_mean", "gt2r/segment_quality/delta_time", "gt2r/segment_quality/record_number", "gt2r/segment_quality/reference_pt_lat", "gt2r/segment_quality/reference_pt_lon", "gt2r/segment_quality/segment_id", "gt2r/segment_quality/signal_selection_source", "gt2r/segment_quality/signal_selection_status/signal_selection_status_all", "gt2r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3l/land_ice_segments/atl06_quality_summary", "gt3l/land_ice_segments/delta_time", "gt3l/land_ice_segments/h_li", "gt3l/land_ice_segments/h_li_sigma", "gt3l/land_ice_segments/latitude", "gt3l/land_ice_segments/longitude", "gt3l/land_ice_segments/segment_id", "gt3l/land_ice_segments/sigma_geo_h", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_med_corr", "gt3l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_n_corr", "gt3l/land_ice_segments/bias_correction/med_r_fit", "gt3l/land_ice_segments/bias_correction/tx_mean_corr", "gt3l/land_ice_segments/bias_correction/tx_med_corr", "gt3l/land_ice_segments/dem/dem_flag", "gt3l/land_ice_segments/dem/dem_h", "gt3l/land_ice_segments/dem/geoid_free2mean", "gt3l/land_ice_segments/dem/geoid_h", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3l/land_ice_segments/fit_statistics/dh_fit_dy", "gt3l/land_ice_segments/fit_statistics/h_expected_rms", "gt3l/land_ice_segments/fit_statistics/h_mean", "gt3l/land_ice_segments/fit_statistics/h_rms_misfit", "gt3l/land_ice_segments/fit_statistics/h_robust_sprd", "gt3l/land_ice_segments/fit_statistics/n_fit_photons", "gt3l/land_ice_segments/fit_statistics/n_seg_pulses", "gt3l/land_ice_segments/fit_statistics/sigma_h_mean", 
"gt3l/land_ice_segments/fit_statistics/signal_selection_source", "gt3l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3l/land_ice_segments/fit_statistics/snr", "gt3l/land_ice_segments/fit_statistics/snr_significance", "gt3l/land_ice_segments/fit_statistics/w_surface_window_final", "gt3l/land_ice_segments/geophysical/bckgrd", "gt3l/land_ice_segments/geophysical/bsnow_conf", "gt3l/land_ice_segments/geophysical/bsnow_h", "gt3l/land_ice_segments/geophysical/bsnow_od", "gt3l/land_ice_segments/geophysical/cloud_flg_asr", "gt3l/land_ice_segments/geophysical/cloud_flg_atm", "gt3l/land_ice_segments/geophysical/dac", "gt3l/land_ice_segments/geophysical/e_bckgrd", "gt3l/land_ice_segments/geophysical/layer_flag", "gt3l/land_ice_segments/geophysical/msw_flag", "gt3l/land_ice_segments/geophysical/neutat_delay_total", "gt3l/land_ice_segments/geophysical/r_eff", "gt3l/land_ice_segments/geophysical/solar_azimuth", "gt3l/land_ice_segments/geophysical/solar_elevation", "gt3l/land_ice_segments/geophysical/tide_earth", "gt3l/land_ice_segments/geophysical/tide_earth_free2mean", "gt3l/land_ice_segments/geophysical/tide_equilibrium", "gt3l/land_ice_segments/geophysical/tide_load", "gt3l/land_ice_segments/geophysical/tide_ocean", "gt3l/land_ice_segments/geophysical/tide_pole", "gt3l/land_ice_segments/ground_track/ref_azimuth", "gt3l/land_ice_segments/ground_track/ref_coelv", "gt3l/land_ice_segments/ground_track/seg_azimuth", "gt3l/land_ice_segments/ground_track/sigma_geo_at", "gt3l/land_ice_segments/ground_track/sigma_geo_r", "gt3l/land_ice_segments/ground_track/sigma_geo_xt", "gt3l/land_ice_segments/ground_track/x_atc", "gt3l/land_ice_segments/ground_track/y_atc", "gt3l/residual_histogram/bckgrd_per_m", "gt3l/residual_histogram/bin_top_h", "gt3l/residual_histogram/count", "gt3l/residual_histogram/delta_time", "gt3l/residual_histogram/ds_segment_id", "gt3l/residual_histogram/lat_mean", "gt3l/residual_histogram/lon_mean", "gt3l/residual_histogram/pulse_count", "gt3l/residual_histogram/segment_id_list", "gt3l/residual_histogram/x_atc_mean", "gt3l/segment_quality/delta_time", "gt3l/segment_quality/record_number", "gt3l/segment_quality/reference_pt_lat", "gt3l/segment_quality/reference_pt_lon", "gt3l/segment_quality/segment_id", "gt3l/segment_quality/signal_selection_source", "gt3l/segment_quality/signal_selection_status/signal_selection_status_all", "gt3l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3r/land_ice_segments/atl06_quality_summary", "gt3r/land_ice_segments/delta_time", "gt3r/land_ice_segments/h_li", "gt3r/land_ice_segments/h_li_sigma", "gt3r/land_ice_segments/latitude", "gt3r/land_ice_segments/longitude", "gt3r/land_ice_segments/segment_id", "gt3r/land_ice_segments/sigma_geo_h", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_med_corr", "gt3r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_n_corr", "gt3r/land_ice_segments/bias_correction/med_r_fit", "gt3r/land_ice_segments/bias_correction/tx_mean_corr", "gt3r/land_ice_segments/bias_correction/tx_med_corr", "gt3r/land_ice_segments/dem/dem_flag", "gt3r/land_ice_segments/dem/dem_h", "gt3r/land_ice_segments/dem/geoid_free2mean", "gt3r/land_ice_segments/dem/geoid_h", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx", 
"gt3r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3r/land_ice_segments/fit_statistics/dh_fit_dy", "gt3r/land_ice_segments/fit_statistics/h_expected_rms", "gt3r/land_ice_segments/fit_statistics/h_mean", "gt3r/land_ice_segments/fit_statistics/h_rms_misfit", "gt3r/land_ice_segments/fit_statistics/h_robust_sprd", "gt3r/land_ice_segments/fit_statistics/n_fit_photons", "gt3r/land_ice_segments/fit_statistics/n_seg_pulses", "gt3r/land_ice_segments/fit_statistics/sigma_h_mean", "gt3r/land_ice_segments/fit_statistics/signal_selection_source", "gt3r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3r/land_ice_segments/fit_statistics/snr", "gt3r/land_ice_segments/fit_statistics/snr_significance", "gt3r/land_ice_segments/fit_statistics/w_surface_window_final", "gt3r/land_ice_segments/geophysical/bckgrd", "gt3r/land_ice_segments/geophysical/bsnow_conf", "gt3r/land_ice_segments/geophysical/bsnow_h", "gt3r/land_ice_segments/geophysical/bsnow_od", "gt3r/land_ice_segments/geophysical/cloud_flg_asr", "gt3r/land_ice_segments/geophysical/cloud_flg_atm", "gt3r/land_ice_segments/geophysical/dac", "gt3r/land_ice_segments/geophysical/e_bckgrd", "gt3r/land_ice_segments/geophysical/layer_flag", "gt3r/land_ice_segments/geophysical/msw_flag", "gt3r/land_ice_segments/geophysical/neutat_delay_total", "gt3r/land_ice_segments/geophysical/r_eff", "gt3r/land_ice_segments/geophysical/solar_azimuth", "gt3r/land_ice_segments/geophysical/solar_elevation", "gt3r/land_ice_segments/geophysical/tide_earth", "gt3r/land_ice_segments/geophysical/tide_earth_free2mean", "gt3r/land_ice_segments/geophysical/tide_equilibrium", "gt3r/land_ice_segments/geophysical/tide_load", "gt3r/land_ice_segments/geophysical/tide_ocean", "gt3r/land_ice_segments/geophysical/tide_pole", "gt3r/land_ice_segments/ground_track/ref_azimuth", "gt3r/land_ice_segments/ground_track/ref_coelv", "gt3r/land_ice_segments/ground_track/seg_azimuth", "gt3r/land_ice_segments/ground_track/sigma_geo_at", "gt3r/land_ice_segments/ground_track/sigma_geo_r", "gt3r/land_ice_segments/ground_track/sigma_geo_xt", "gt3r/land_ice_segments/ground_track/x_atc", "gt3r/land_ice_segments/ground_track/y_atc", "gt3r/residual_histogram/bckgrd_per_m", "gt3r/residual_histogram/bin_top_h", "gt3r/residual_histogram/count", "gt3r/residual_histogram/delta_time", "gt3r/residual_histogram/ds_segment_id", "gt3r/residual_histogram/lat_mean", "gt3r/residual_histogram/lon_mean", "gt3r/residual_histogram/pulse_count", "gt3r/residual_histogram/segment_id_list", "gt3r/residual_histogram/x_atc_mean", "gt3r/segment_quality/delta_time", "gt3r/segment_quality/record_number", "gt3r/segment_quality/reference_pt_lat", "gt3r/segment_quality/reference_pt_lon", "gt3r/segment_quality/segment_id", "gt3r/segment_quality/signal_selection_source", "gt3r/segment_quality/signal_selection_status/signal_selection_status_all", "gt3r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3r/segment_quality/signal_selection_status/signal_selection_status_confident", "orbit_info/bounding_polygon_lat1", "orbit_info/bounding_polygon_lon1", "orbit_info/crossing_time", "orbit_info/cycle_number", "orbit_info/lan", "orbit_info/orbit_number", "orbit_info/rgt", "orbit_info/sc_orient", "orbit_info/sc_orient_time", "quality_assessment/qa_granule_fail_reason", "quality_assessment/qa_granule_pass_fail", "quality_assessment/gt1l/delta_time", "quality_assessment/gt1l/lat_mean", "quality_assessment/gt1l/lon_mean", "quality_assessment/gt1l/signal_selection_source_fraction_0", 
"quality_assessment/gt1l/signal_selection_source_fraction_1", "quality_assessment/gt1l/signal_selection_source_fraction_2", "quality_assessment/gt1l/signal_selection_source_fraction_3", "quality_assessment/gt1r/delta_time", "quality_assessment/gt1r/lat_mean", "quality_assessment/gt1r/lon_mean", "quality_assessment/gt1r/signal_selection_source_fraction_0", "quality_assessment/gt1r/signal_selection_source_fraction_1", "quality_assessment/gt1r/signal_selection_source_fraction_2", "quality_assessment/gt1r/signal_selection_source_fraction_3", "quality_assessment/gt2l/delta_time", "quality_assessment/gt2l/lat_mean", "quality_assessment/gt2l/lon_mean", "quality_assessment/gt2l/signal_selection_source_fraction_0", "quality_assessment/gt2l/signal_selection_source_fraction_1", "quality_assessment/gt2l/signal_selection_source_fraction_2", "quality_assessment/gt2l/signal_selection_source_fraction_3", "quality_assessment/gt2r/delta_time", "quality_assessment/gt2r/lat_mean", "quality_assessment/gt2r/lon_mean", "quality_assessment/gt2r/signal_selection_source_fraction_0", "quality_assessment/gt2r/signal_selection_source_fraction_1", "quality_assessment/gt2r/signal_selection_source_fraction_2", "quality_assessment/gt2r/signal_selection_source_fraction_3", "quality_assessment/gt3l/delta_time", "quality_assessment/gt3l/lat_mean", "quality_assessment/gt3l/lon_mean", "quality_assessment/gt3l/signal_selection_source_fraction_0", "quality_assessment/gt3l/signal_selection_source_fraction_1", "quality_assessment/gt3l/signal_selection_source_fraction_2", "quality_assessment/gt3l/signal_selection_source_fraction_3", "quality_assessment/gt3r/delta_time", "quality_assessment/gt3r/lat_mean", "quality_assessment/gt3r/lon_mean", "quality_assessment/gt3r/signal_selection_source_fraction_0", "quality_assessment/gt3r/signal_selection_source_fraction_1", "quality_assessment/gt3r/signal_selection_source_fraction_2", "quality_assessment/gt3r/signal_selection_source_fraction_3"]} diff --git a/icepyx/tests/test_APIformatting.py b/icepyx/tests/test_APIformatting.py index d934a97dd..6dfe98353 100644 --- a/icepyx/tests/test_APIformatting.py +++ b/icepyx/tests/test_APIformatting.py @@ -1,4 +1,3 @@ -import pytest import datetime as dt import icepyx.core.APIformatting as apifmt @@ -103,18 +102,52 @@ def test_combine_params(): ############ to_string ############# def test_to_string(): CMRparams = { + "temporal": "2019-02-20T00:00:00Z,2019-02-28T23:59:59Z", + "bounding_box": "-55,68,-48,71", + } + reqparams = { "short_name": "ATL06", "version": "002", - "temporal": "2019-02-20T00:00:00Z,2019-02-28T23:59:59Z", + "page_size": 2000, + "page_num": 1, + } + params = apifmt.combine_params(CMRparams, reqparams) + obs = apifmt.to_string(params) + expected = ( + "temporal=2019-02-20T00:00:00Z,2019-02-28T23:59:59Z" + "&bounding_box=-55,68,-48,71" + "&short_name=ATL06&version=002" + "&page_size=2000&page_num=1" + ) + assert obs == expected + + +def test_to_string_with_list(): + CMRparams = { + "options[readable_granule_name][pattern]": "true", + "options[spatial][or]": "true", + "readable_granule_name[]": [ + "ATL06_??????????????_084903??_*", + "ATL06_??????????????_090203??_*", + ], "bounding_box": "-55,68,-48,71", } - reqparams = {"page_size": 2000, "page_num": 1} + reqparams = { + "short_name": "ATL06", + "version": "002", + "page_size": 2000, + "page_num": 1, + } params = apifmt.combine_params(CMRparams, reqparams) obs = apifmt.to_string(params) expected = ( - "short_name=ATL06&version=002" - 
"&temporal=2019-02-20T00:00:00Z,2019-02-28T23:59:59Z" - "&bounding_box=-55,68,-48,71&page_size=2000&page_num=1" + "options[readable_granule_name][pattern]=true" + "&options[spatial][or]=true" + "&readable_granule_name[]=ATL06_??????????????_084903??_*" + "&readable_granule_name[]=ATL06_??????????????_090203??_*" + "&bounding_box=-55,68,-48,71" + "&short_name=ATL06&version=002" + "&page_size=2000&page_num=1" ) assert obs == expected @@ -131,7 +164,6 @@ def test_CMRparams_no_other_inputs(): CMRparams = apifmt.Parameters("CMR") # TestQuestion: the next statement essentially tests _get_possible_keys as well, so how would I test them independently? assert CMRparams.poss_keys == { - "default": ["short_name", "version"], "spatial": ["bounding_box", "polygon"], "optional": [ "temporal", @@ -143,14 +175,9 @@ def test_CMRparams_no_other_inputs(): assert CMRparams.fmted_keys == {} assert CMRparams._check_valid_keys # Note: this test must be done before the next one - if CMRparams.partype == "required": - assert CMRparams.check_req_values() == False - else: - assert CMRparams.check_values() == False + assert CMRparams.check_values() is False CMRparams.build_params( - product="ATL06", - version="006", start=dt.datetime(2019, 2, 20, 0, 0), end=dt.datetime(2019, 2, 24, 23, 59, 59), extent_type="bounding_box", @@ -158,8 +185,6 @@ def test_CMRparams_no_other_inputs(): ) obs_fmted_params = CMRparams.fmted_keys exp_fmted_params = { - "short_name": "ATL06", - "version": "006", "temporal": "2019-02-20T00:00:00Z,2019-02-24T23:59:59Z", "bounding_box": "-55.0,68.0,-48.0,71.0", } diff --git a/icepyx/tests/test_granules.py b/icepyx/tests/test_granules.py index e8be99f39..4e12c0eb3 100644 --- a/icepyx/tests/test_granules.py +++ b/icepyx/tests/test_granules.py @@ -29,6 +29,9 @@ # region_a = ipx.Query(short_name, spatial_extent, date_range) # region_a.avail_granules(ids=True) +# add test that s3urls are gotten for ALL products (e.g. ATL15 was failing +# due to .nc extention instead of .h5)) + # DevNote: clearly there's a better way that doesn't make the function so long... # what is it? @@ -636,6 +639,6 @@ def test_avail_granule_CMR_error(): ermsg = "An error was returned from NSIDC in regards to your query: temporal start datetime is invalid: [badinput] is not a valid datetime." with pytest.raises(NsidcQueryError, match=re.escape(ermsg)): - CMRparams = {"version": "003", "temporal": "badinput", "short_name": "ATL08"} - reqparams = {"page_size": 1} + CMRparams = {"temporal": "badinput"} + reqparams = {"version": "003", "short_name": "ATL08", "page_size": 1} Granules().get_avail(CMRparams=CMRparams, reqparams=reqparams) diff --git a/icepyx/tests/test_is2ref.py b/icepyx/tests/test_is2ref.py index a07a6b948..fcb51c804 100644 --- a/icepyx/tests/test_is2ref.py +++ b/icepyx/tests/test_is2ref.py @@ -7,14 +7,20 @@ def test_num_product(): dsnum = 6 - ermsg = "A valid product string was not provided. Check user input, if given, or file metadata." + ermsg = ( + "A valid product string was not provided. " + "Check user input, if given, or file metadata." + ) with pytest.raises(TypeError, match=ermsg): is2ref._validate_product(dsnum) def test_bad_product(): wrngds = "atl-6" - ermsg = "A valid product string was not provided. Check user input, if given, or file metadata." + ermsg = ( + "A valid product string was not provided. " + "Check user input, if given, or file metadata." 
+    )
     with pytest.raises(AssertionError, match=ermsg):
         is2ref._validate_product(wrngds)
diff --git a/icepyx/tests/test_read.py b/icepyx/tests/test_read.py
index 67b29b598..20807c410 100644
--- a/icepyx/tests/test_read.py
+++ b/icepyx/tests/test_read.py
@@ -1,97 +1,85 @@
 import pytest
 
-from icepyx.core.read import Read
 import icepyx.core.read as read
 
 
-def test_check_datasource_type():
-    ermesg = "filepath must be a string or Path"
+# note isdir will issue a TypeError if a tuple is passed
+def test_parse_source_bad_input_type():
+    ermesg = (
+        "data_source should be a list of files, a directory, the path to a file, "
+        "or a glob string."
+    )
     with pytest.raises(TypeError, match=ermesg):
-        read._check_datasource(246)
-
-
-@pytest.mark.parametrize(
-    "filepath, expect",
-    [
-        ("./", "is2_local"),
-        (
-            """s3://nsidc-cumulus-prod-protected/ATLAS/
-            ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5""",
-            "is2_s3",
-        ),
-    ],
-)
-def test_check_datasource(filepath, expect):
-    source_type = read._check_datasource(filepath)
-    assert source_type == expect
-
-
-# not sure what I could enter here would get to the else...
-# def test_unknown_datasource_type():
-#     ermesg = "Could not confirm the datasource type."
-#     with pytest.raises(ValueError, match=ermesg):
-#         read._check_datasource("")
-
+        read._parse_source(150)
+        read._parse_source({"myfiles": "./my_valid_path/file.h5"})
 
-def test_validate_source_str_given_as_list():
-    ermesg = "You must enter your input as a string."
-    with pytest.raises(AssertionError, match=ermesg):
-        read._validate_source(["/path/to/valid/ATL06_file.py"])
-
-def test_validate_source_str_not_a_dir_or_file():
-    ermesg = "Your data source string is not a valid data source."
-    with pytest.raises(AssertionError, match=ermesg):
-        read._validate_source("./fake/dirpath")
-        read._validate_source("./fake_file.h5")
+def test_parse_source_no_files():
+    ermesg = (
+        "No files found matching the specified `data_source`. Check your glob "
+        "string or file list."
+    )
+    with pytest.raises(KeyError, match=ermesg):
+        read._parse_source("./icepyx/bogus_glob")
 
 
 @pytest.mark.parametrize(
-    "dir, fn_glob, expect",
+    "source, expect",
     [
-        (
-            "./icepyx/",
-            "is2*.py",
-            (
-                sorted(
-                    [
-                        "./icepyx/core",
-                        "./icepyx/quest",
-                        "./icepyx/quest/dataset_scripts",
-                        "./icepyx/tests",
-                    ]
-                ),
-                sorted(
-                    [
-                        "./icepyx/core/is2ref.py",
-                        "./icepyx/tests/is2class_query.py",
-                    ]
-                ),
+        (  # check list input
+            [
+                "./icepyx/core/is2ref.py",
+                "./icepyx/tests/is2class_query.py",
+            ],
+            sorted(
+                [
+                    "./icepyx/core/is2ref.py",
+                    "./icepyx/tests/is2class_query.py",
+                ]
             ),
         ),
-        (
-            "./icepyx/core",
-            "is2*.py",
-            ([], ["./icepyx/core/is2ref.py"]),
+        (  # check dir input
+            "./examples",
+            [
+                "./examples/README.md",
+            ],
         ),
-        (
-            "./icepyx",
-            "bogus_glob",
-            (
+        (  # check filename string with glob pattern input
+            "./icepyx/**/is2*.py",
+            sorted(
                 [
-                    "./icepyx/core",
-                    "./icepyx/quest",
-                    "./icepyx/quest/dataset_scripts",
-                    "./icepyx/tests",
-                ],
-                [],
+                    "./icepyx/core/is2ref.py",
+                    "./icepyx/tests/is2class_query.py",
+                ]
+            ),
+        ),
+        (  # check filename string without glob pattern input
+            "./icepyx/core/is2ref.py",
+            [
+                "./icepyx/core/is2ref.py",
+            ],
+        ),
+        (  # check s3 filename string
+            (
+                "s3://nsidc-cumulus-prod-protected/ATLAS/"
+                "ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5"
+            ),
+            [
+                (
+                    "s3://nsidc-cumulus-prod-protected/ATLAS/"
+                    "ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5"
+                ),
+            ],
+        ),
+        (
+            "./icepyx/core/is2*.py",
+            ["./icepyx/core/is2ref.py"],
         ),
     ],
 )
-def test_check_run_fast_scandir(dir, fn_glob, expect):
-    (subfolders, files) = read._run_fast_scandir(dir, fn_glob)
-    assert (sorted(subfolders), sorted(files)) == expect
+def test_parse_source(source, expect):
+    filelist = read._parse_source(source, glob_kwargs={"recursive": True})
+    assert (sorted(filelist)) == expect
 
 
 @pytest.mark.parametrize(
@@ -114,18 +102,3 @@ def test_get_track_type_str(
         exp_spot_dim_name,
         exp_spot_var_name,
     )
-
-
-# Best way to test this may be by including a small sample file with the repo
-# (which can be used for testing some of the catalog/read-in functions as well)
-# def test_invalid_filename_pattern_in_file():
-#     ermesg = "Your input filename does not match the specified pattern."
-#     default_pattern = Read("/path/to/valid/source/file")._filename_pattern
-#     with pytest.raises(AssertionError, match=ermesg):
-#         read._validate_source('/valid/filepath/with/non-default/filename/pattern.h5', default_pattern)
-
-# def test_invalid_filename_pattern_in_dir():
-#     ermesg = "None of your filenames match the specified pattern."
-#     default_pattern = Read("/path/to/valid/dir/")._filename_pattern
-#     with pytest.raises(AssertionError, match=ermesg):
-#         read._validate_source('/valid/dirpath/with/non-default/filename/pattern.h5', default_pattern)
diff --git a/icepyx/tests/test_validate_inputs.py b/icepyx/tests/test_validate_inputs.py
index 0b5f2f2eb..4d0ea0bd5 100644
--- a/icepyx/tests/test_validate_inputs.py
+++ b/icepyx/tests/test_validate_inputs.py
@@ -1,7 +1,4 @@
 import pytest
-import warnings
-import datetime as dt
-import numpy as np
 
 import icepyx.core.validate_inputs as val
 
@@ -70,3 +67,35 @@ def test_tracks_valid():
         val.tracks(1388)
     # check that warning message matches expected
     assert record[0].message.args[0] == expmsg
+
+
+@pytest.mark.parametrize(
+    "filepath, expect",
+    [
+        ("./", "./"),
+        (
+            """s3://nsidc-cumulus-prod-protected/ATLAS/
+            ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5""",
+            """s3://nsidc-cumulus-prod-protected/ATLAS/
+            ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5""",
+        ),
+    ],
+)
+def test_check_s3bucket(filepath, expect):
+    verified_path = val.check_s3bucket(filepath)
+    assert verified_path == expect
+
+
+def test_wrong_s3bucket():
+    filepath = """s3://notnsidc-cumulus-prod-protected/ATLAS/
+    ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5"""
+
+    expmsg = (
+        "s3 data being read from outside the NSIDC data bucket. Icepyx can "
+        "read this data, but available data lists may not be accurate."
+    )
+
+    with pytest.warns(UserWarning) as record:
+        val.check_s3bucket(filepath)
+
+    assert record[0].message.args[0] == expmsg
diff --git a/pytest.ini b/pytest.ini
index 75061ecad..d9bafb88f 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -3,4 +3,4 @@ minversion = 2.0
 norecursedirs = .git
 python_files = test*.py
 addopts = --cov=./ --doctest-modules
-doctest_optionflags = NORMALIZE_WHITESPACE NUMBER
\ No newline at end of file
+doctest_optionflags = NORMALIZE_WHITESPACE NUMBER
diff --git a/readthedocs.yml b/readthedocs.yml
index 503321bde..dba60979f 100644
--- a/readthedocs.yml
+++ b/readthedocs.yml
@@ -27,5 +27,5 @@ python:
   install:
     - requirements: requirements-docs.txt
     - requirements: requirements.txt
-    - method: setuptools
+    - method: pip
      path: .
diff --git a/requirements-dev.txt b/requirements-dev.txt
index e0fc68545..6a0e3eba2 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -4,4 +4,4 @@ pre-commit
 pypistats
 pytest>=4.6
 pytest-cov
-responses
\ No newline at end of file
+responses
diff --git a/requirements-docs.txt b/requirements-docs.txt
index 2a3cb3684..51dc9ff5a 100644
--- a/requirements-docs.txt
+++ b/requirements-docs.txt
@@ -8,4 +8,4 @@ pygithub
 sphinx>=4.3
 sphinx-panels
 sphinx_rtd_theme>=1.0
-sphinxcontrib-bibtex
\ No newline at end of file
+sphinxcontrib-bibtex
diff --git a/requirements.txt b/requirements.txt
index 06f4ad9a7..6a9659270 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
 backoff
+dask[dataframe]
 datashader
 earthaccess>=0.5.1
 fiona
@@ -12,4 +13,4 @@ numpy
 requests
 s3fs
 shapely
-xarray
\ No newline at end of file
+xarray